Mirror of https://github.com/anyproto/anytype-heart.git (synced 2025-06-08 05:47:07 +09:00)

Commit 9f76d4a99a
GO-4140 Merge remote-tracking branch 'origin/main' into go-4140-epic-web-publishing-go

194 changed files with 15576 additions and 5362 deletions
.github/workflows/build.yml (vendored): 27 changed lines
@@ -10,7 +10,7 @@ on:
       run-on-runner:
         description: 'Specify the runner to use'
         required: true
-        default: 'ARM64'
+        default: 'arm64'
       perf-test:
         description: 'Run perf test times'
         required: true
@@ -32,19 +32,19 @@ permissions:
 name: Build
 jobs:
   build:
-    runs-on: ${{ github.event_name == 'push' && 'macos-12' || (github.event.inputs.run-on-runner || 'ARM64') }}
+    runs-on: ${{ github.event_name == 'push' && 'arm64' || (github.event.inputs.run-on-runner || 'arm64') }}
     steps:
       - name: validate agent
         run: |
-          if [[ "${{ github.event_name }}" == "workflow_dispatch" && "${{ github.event.inputs.run-on-runner }}" != "ARM64" ]]; then
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" && "${{ github.event.inputs.run-on-runner }}" != "arm64" ]]; then
             echo "Invalid runner"
             exit 1
           fi
       - name: Install Go
         uses: actions/setup-go@v1
         with:
-          go-version: 1.22.8
-        if: github.event.inputs.run-on-runner != 'ARM64' && github.event_name != 'schedule'
+          go-version: 1.23.2
+        if: runner.name != 'mac-mini-org-heart'
       - name: Setup GO
         run: |
           go version
@@ -61,7 +61,12 @@
           HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 HOMEBREW_NO_AUTO_UPDATE=1 HOMEBREW_NO_INSTALL_CLEANUP=1 brew install grpcurl
           HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 HOMEBREW_NO_AUTO_UPDATE=1 HOMEBREW_NO_INSTALL_CLEANUP=1 brew tap messense/macos-cross-toolchains && brew install x86_64-unknown-linux-musl
           npm i -g node-gyp
-        if: github.event.inputs.run-on-runner != 'ARM64' && github.event_name != 'schedule'
+          echo "Azaza"
+          protoc --version
+          which x86_64-w64-mingw32-gcc
+          which protoc
+          echo "Azaza2"
+        if: runner.name != 'mac-mini-org-heart'
       - name: Checkout
         uses: actions/checkout@v3
       - uses: actions/cache@v3
@@ -73,7 +78,7 @@
             ${{ runner.os }}-go-${{ matrix.go-version }}-
       - name: Install old MacOS SDK (for backward compatibility of CGO)
         run: source .github/install_macos_sdk.sh 10.15
-        if: github.event.inputs.run-on-runner != 'ARM64' && github.event_name != 'schedule'
+        if: runner.name != 'mac-mini-org-heart'
       - name: Set env vars
         env:
           UNSPLASH_KEY: ${{ secrets.UNSPLASH_KEY }}
@@ -133,14 +138,14 @@
           TEST_MNEMONIC: ${{ secrets.TEST_MNEMONIC }}
           PROM_KEY: ${{ secrets.PROMETHEUS_USERNAME }}
           PROM_PASSWORD: ${{ secrets.PROMETHEUS_PASSWORD }}
-        if: github.event.inputs.perf-test != '0' || github.event_name == 'schedule'
+        if: github.event.inputs.perf-test != '0'
       - name: Archive perf tests results
         uses: actions/upload-artifact@v4
         with:
           name: pprofs
           path: |
             *.pprof
-        if: github.event.inputs.perf-test != '0' || github.event_name == 'schedule'
+        if: github.event.inputs.perf-test != '0'
       - name: end run perf tests
         run: |
           rm -rf *.pprof
@@ -149,7 +154,7 @@
           gh run watch ${{ github.run_id }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        if: github.event.inputs.perf-test != '0' || github.event_name == 'schedule'
+        if: github.event.inputs.perf-test != '0'
       - name: Make JS protos
         run: |
           make protos-js
@@ -252,7 +257,7 @@
           cp pkg/lib/bundle/internalTypes.json ./json
       - name: Compile android lib
         run: |
-          gomobile bind -tags "envproduction nogrpcserver gomobile nowatchdog nosigar nomutexdeadlockdetector timetzdata rasterizesvg" -ldflags "$FLAGS" -v -target=android -androidapi 19 -o lib.aar github.com/anyproto/anytype-heart/clientlibrary/service github.com/anyproto/anytype-heart/core || true
+          gomobile bind -tags "envproduction nogrpcserver gomobile nowatchdog nosigar nomutexdeadlockdetector timetzdata rasterizesvg" -ldflags "$FLAGS" -v -target=android -androidapi 26 -o lib.aar github.com/anyproto/anytype-heart/clientlibrary/service github.com/anyproto/anytype-heart/core || true
           gtar --exclude ".*" -czvf android_lib_${VERSION}.tar.gz lib.aar protobuf json
           mv android_lib_${VERSION}.tar.gz .release/
       - name: Publish android lib to maven
Makefile: 2 changed lines
@@ -160,7 +160,7 @@ build-android: setup-go setup-gomobile
 ifdef ANY_SYNC_NETWORK
 	@$(eval TAGS := $(TAGS) envnetworkcustom)
 endif
-	gomobile bind -tags "$(TAGS)" -ldflags "$(FLAGS)" $(BUILD_FLAGS) -target=android -androidapi 19 -o lib.aar github.com/anyproto/anytype-heart/clientlibrary/service github.com/anyproto/anytype-heart/core
+	gomobile bind -tags "$(TAGS)" -ldflags "$(FLAGS)" $(BUILD_FLAGS) -target=android -androidapi 26 -o lib.aar github.com/anyproto/anytype-heart/clientlibrary/service github.com/anyproto/anytype-heart/core
 	@mkdir -p dist/android/ && mv lib.aar dist/android/
 	@go mod tidy

@@ -44,11 +44,6 @@ func RunDebugServer(addr *C.char) {
 	service.RunDebugServer(C.GoString(addr))
 }
 
-//export SetLogLevels
-func SetLogLevels(levels *C.char) {
-	service.SetLogLevels(C.GoString(levels))
-}
-
 //export Command
 func Command(cmd *C.char, data unsafe.Pointer, dataLen C.int, callback C.proxyFunc, callbackContext unsafe.Pointer) {
 	service.CommandAsync(C.GoString(cmd), C.GoBytes(data, dataLen), func(data []byte) {
@@ -64,10 +64,6 @@ func RunDebugServer(addr string) {
 	})
 }
 
-func SetLogLevels(levels string) {
-	logging.SetLogLevels(levels)
-}
-
 func SetEnv(key, value string) {
 	os.Setenv(key, value)
 }
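The two hunks above drop the SetLogLevels wrappers from the client library surface: the cgo //export shim and the plain Go forwarder. A minimal sketch of the remaining direct path, assuming the logging.SetLogLevels helper that the removed forwarder called is still exported from this repo's logging package; the import path and the level-string value are assumptions, not taken from this diff:

package main

import (
	// Assumed import path for the logging package the removed wrapper
	// forwarded to; verify against the actual repo layout.
	"github.com/anyproto/anytype-heart/pkg/lib/logging"
)

func main() {
	// The removed wrappers passed the levels string through verbatim, so an
	// in-process Go caller can call the logging package directly. The value
	// below is only an illustrative placeholder; the accepted format is not
	// shown in this diff.
	logging.SetLogLevels("anytype-mw-api=DEBUG")
}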
@@ -25,331 +25,335 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
 func init() { proto.RegisterFile("pb/protos/service/service.proto", fileDescriptor_93a29dc403579097) }
 
 var fileDescriptor_93a29dc403579097 = []byte{
-	// 5181 bytes of a gzipped FileDescriptorProto
-	[machine-generated gzipped descriptor bytes omitted]
+	// 5243 bytes of a gzipped FileDescriptorProto
+	[regenerated gzipped descriptor bytes omitted; the excerpt is truncated partway through this block]
|
||||
0x97, 0xf3, 0xb8, 0x9c, 0x98, 0xdd, 0x8d, 0xf1, 0xe0, 0x29, 0x6d, 0xc7, 0x27, 0x89, 0x97, 0x7c,
|
||||
0x61, 0x0d, 0x10, 0x76, 0x0e, 0x67, 0xf1, 0xc4, 0xd4, 0x14, 0xa9, 0x81, 0x94, 0xb7, 0xaa, 0xba,
|
||||
0xde, 0x0d, 0x02, 0x3f, 0x6f, 0xd2, 0x31, 0xe3, 0x01, 0x3f, 0x52, 0xde, 0xc7, 0x0f, 0x04, 0x41,
|
||||
0xf6, 0x56, 0xd7, 0x5b, 0xad, 0xe8, 0x76, 0xf2, 0xb1, 0x5e, 0xc7, 0x0e, 0x89, 0xc7, 0x03, 0xb8,
|
||||
0x50, 0xf6, 0x46, 0xf0, 0x60, 0x8c, 0x36, 0x1b, 0xb4, 0xa1, 0x31, 0x6a, 0xf6, 0x5f, 0xfb, 0x8c,
|
||||
0x51, 0x0c, 0xd6, 0x3e, 0x7f, 0xa6, 0xc7, 0xe8, 0x5e, 0x2c, 0xe2, 0x3a, 0x6f, 0x7f, 0x93, 0xb2,
|
||||
0x1b, 0xbd, 0x10, 0x46, 0xea, 0xdb, 0x50, 0x43, 0xf9, 0x51, 0x1f, 0x58, 0x15, 0x6f, 0xf7, 0xe6,
|
||||
0x03, 0xbe, 0xf5, 0x0a, 0xa1, 0xd3, 0x37, 0x58, 0x2a, 0x6c, 0xf7, 0xe6, 0x03, 0xbe, 0xf5, 0xd7,
|
||||
0xc2, 0x9d, 0xbe, 0xc1, 0x27, 0xc3, 0xdb, 0xbd, 0x79, 0xed, 0xfb, 0xaf, 0x9b, 0x81, 0xeb, 0x3a,
|
||||
0xaf, 0xf3, 0xb0, 0x44, 0xa4, 0x0b, 0x86, 0xa5, 0x93, 0xbe, 0x3d, 0x83, 0x86, 0xd2, 0x49, 0x5a,
|
||||
0xc5, 0xb9, 0x62, 0x06, 0x2b, 0xc5, 0x29, 0xaf, 0x52, 0xf9, 0x92, 0xfe, 0x79, 0x0f, 0xa3, 0x0d,
|
||||
0x1c, 0x5a, 0x34, 0x85, 0x94, 0xec, 0xeb, 0x46, 0x0f, 0xb5, 0x27, 0x8a, 0x37, 0x03, 0xf6, 0xda,
|
||||
0x07, 0x8b, 0xb7, 0x7a, 0xd2, 0xf6, 0xc5, 0x9f, 0xc7, 0xb8, 0x6f, 0x1c, 0x43, 0xad, 0x8a, 0xbe,
|
||||
0x74, 0x7c, 0xda, 0x5f, 0x41, 0xbb, 0xff, 0xdb, 0x66, 0x5d, 0x01, 0xfd, 0xeb, 0x41, 0xf0, 0xac,
|
||||
0x8f, 0x45, 0x30, 0x10, 0x9e, 0xdf, 0x4a, 0x47, 0x17, 0xe4, 0x1f, 0x9b, 0x05, 0x74, 0x83, 0xca,
|
||||
0xef, 0x2a, 0x5e, 0x97, 0x63, 0x56, 0xea, 0x31, 0x11, 0x6a, 0x56, 0x0b, 0xc3, 0x91, 0xf1, 0xe2,
|
||||
0x96, 0x5a, 0xce, 0x85, 0x43, 0x1e, 0xac, 0xbf, 0x2d, 0x74, 0xca, 0x13, 0xb2, 0xec, 0xd0, 0xb0,
|
||||
0x40, 0x5f, 0xdc, 0x56, 0x8d, 0x1a, 0x2b, 0x0e, 0x2c, 0xef, 0x2f, 0x78, 0xde, 0xd3, 0xb0, 0x77,
|
||||
0xa3, 0xc1, 0xe7, 0xb7, 0x53, 0xd2, 0x65, 0xf9, 0xcf, 0x95, 0xe8, 0x91, 0xc7, 0xda, 0xf7, 0x09,
|
||||
0x60, 0xd7, 0xe3, 0xc7, 0x01, 0xfb, 0x94, 0x92, 0x29, 0xdc, 0xef, 0xff, 0x6a, 0xca, 0xf6, 0x76,
|
||||
0x1e, 0x4f, 0x65, 0x3f, 0xcd, 0x04, 0x2b, 0xdb, 0xb7, 0xf3, 0xf8, 0x76, 0x15, 0x35, 0xa4, 0x6f,
|
||||
0xe7, 0x09, 0xe0, 0xce, 0xed, 0x3c, 0x88, 0x67, 0xf4, 0x76, 0x1e, 0xd4, 0x5a, 0xf0, 0x76, 0x9e,
|
||||
0xb0, 0x06, 0x15, 0xde, 0x9b, 0x22, 0xa8, 0x7d, 0xeb, 0x5e, 0x16, 0xfd, 0x6d, 0xec, 0x67, 0xb7,
|
||||
0x51, 0x21, 0x26, 0x38, 0xc5, 0xc9, 0x73, 0x6e, 0x3d, 0x9e, 0xa9, 0x77, 0xd6, 0x6d, 0xbb, 0x37,
|
||||
0xaf, 0x7d, 0xff, 0x54, 0xaf, 0x6e, 0x4c, 0x38, 0xe7, 0xa5, 0xbc, 0x99, 0x69, 0x23, 0x14, 0x9e,
|
||||
0x6b, 0x0b, 0x6e, 0xcb, 0x6f, 0xf6, 0x83, 0x89, 0xea, 0xd6, 0x84, 0x6e, 0xf4, 0x61, 0x97, 0x21,
|
||||
0xd0, 0xe4, 0xdb, 0xbd, 0x79, 0x62, 0x1a, 0x51, 0xbe, 0x55, 0x6b, 0xf7, 0x30, 0xe6, 0xb7, 0xf5,
|
||||
0xd3, 0xfe, 0x0a, 0xda, 0xfd, 0x42, 0xa7, 0x8d, 0xae, 0x7b, 0xd9, 0xce, 0x5b, 0x5d, 0xa6, 0x46,
|
||||
0x5e, 0x33, 0x0f, 0xfb, 0xe2, 0xa1, 0x04, 0xc2, 0x9d, 0x42, 0xbb, 0x12, 0x08, 0x74, 0x1a, 0xfd,
|
||||
0xfc, 0x76, 0x4a, 0xba, 0x2c, 0xff, 0xbc, 0x12, 0xdd, 0x25, 0xcb, 0xa2, 0xfb, 0xc1, 0x17, 0x7d,
|
||||
0x2d, 0x83, 0xfe, 0xf0, 0xe5, 0xad, 0xf5, 0x74, 0xa1, 0xfe, 0x6d, 0x25, 0xba, 0x17, 0x28, 0x94,
|
||||
0xea, 0x20, 0xb7, 0xb0, 0xee, 0x77, 0x94, 0x1f, 0xde, 0x5e, 0x91, 0x9a, 0xee, 0x5d, 0x7c, 0xd4,
|
||||
0xbe, 0xb6, 0x26, 0x60, 0x7b, 0x44, 0x5f, 0x5b, 0xd3, 0xad, 0x05, 0x37, 0x79, 0xe2, 0xcb, 0x66,
|
||||
0xd1, 0x85, 0x6e, 0xf2, 0xc8, 0x13, 0x6a, 0x60, 0xcd, 0xb1, 0xd6, 0xc9, 0x61, 0x4e, 0x5e, 0xbd,
|
||||
0x2b, 0xe2, 0x7c, 0x4c, 0x3b, 0x51, 0xf2, 0x6e, 0x27, 0x86, 0x83, 0x9b, 0x63, 0xb5, 0xf4, 0x8c,
|
||||
0x37, 0x0b, 0xa9, 0xc7, 0x94, 0xbe, 0x41, 0x82, 0x9b, 0x63, 0x2d, 0x94, 0xf0, 0xa6, 0xb3, 0xc6,
|
||||
0x90, 0x37, 0x90, 0x2c, 0x3e, 0xe9, 0x83, 0x82, 0x14, 0xdd, 0x78, 0x33, 0x7b, 0xee, 0x9b, 0x21,
|
||||
0x2b, 0xad, 0x7d, 0xf7, 0xad, 0x9e, 0x34, 0xe1, 0x76, 0xc4, 0xc4, 0x57, 0x2c, 0x1e, 0xb3, 0x32,
|
||||
0xe8, 0xd6, 0x50, 0xbd, 0xdc, 0xba, 0x34, 0xe6, 0x76, 0x97, 0x67, 0xf3, 0x59, 0xae, 0x1b, 0x93,
|
||||
0x74, 0xeb, 0x52, 0xdd, 0x6e, 0x01, 0x0d, 0xb7, 0x05, 0xad, 0x5b, 0x99, 0x5e, 0x3e, 0x09, 0x9b,
|
||||
0xf1, 0xb2, 0xca, 0x8d, 0x5e, 0x2c, 0x5d, 0x4f, 0xdd, 0x8d, 0x3a, 0xea, 0x09, 0x7a, 0xd2, 0x56,
|
||||
0x4f, 0x1a, 0xee, 0xcf, 0x39, 0x6e, 0x4d, 0x7f, 0xda, 0xee, 0xb0, 0xd5, 0xea, 0x52, 0x4f, 0xfb,
|
||||
0x2b, 0xc0, 0xdd, 0x50, 0xdd, 0xab, 0x8e, 0xd2, 0x4a, 0xec, 0xa7, 0x59, 0x36, 0xd8, 0x08, 0x74,
|
||||
0x93, 0x06, 0x0a, 0xee, 0x86, 0x22, 0x30, 0xd1, 0x93, 0x9b, 0xdd, 0xc3, 0x7c, 0xd0, 0x65, 0x47,
|
||||
0x52, 0xbd, 0x7a, 0xb2, 0x4b, 0x83, 0x1d, 0x2d, 0xe7, 0x51, 0x9b, 0xda, 0x0e, 0xc3, 0x0f, 0xae,
|
||||
0x55, 0xe1, 0xed, 0xde, 0x3c, 0x78, 0xdd, 0x2e, 0x29, 0x39, 0xb3, 0x3c, 0xa4, 0x4c, 0x78, 0x33,
|
||||
0xc9, 0xa3, 0x0e, 0x0a, 0xec, 0x0a, 0xaa, 0x61, 0xf4, 0x36, 0x1d, 0x4f, 0x98, 0x40, 0xdf, 0x14,
|
||||
0xb9, 0x40, 0xf0, 0x4d, 0x11, 0x00, 0x41, 0xd3, 0xa9, 0xdf, 0xcd, 0x76, 0xe8, 0xe1, 0x18, 0x6b,
|
||||
0x3a, 0xad, 0xec, 0x50, 0xa1, 0xa6, 0x43, 0x69, 0x10, 0x0d, 0x8c, 0x5b, 0xfd, 0x69, 0xfc, 0x93,
|
||||
0x90, 0x19, 0xf0, 0x7d, 0xfc, 0x46, 0x2f, 0x16, 0xcc, 0x28, 0xd6, 0x61, 0x3a, 0x4b, 0x05, 0x36,
|
||||
0xa3, 0x38, 0x36, 0x6a, 0x24, 0x34, 0xa3, 0xb4, 0x51, 0xaa, 0x7a, 0x75, 0x8e, 0x70, 0x38, 0x0e,
|
||||
0x57, 0x4f, 0x31, 0xfd, 0xaa, 0x67, 0xd8, 0xd6, 0x8b, 0xcd, 0xdc, 0x74, 0x19, 0x31, 0xd5, 0x8b,
|
||||
0x65, 0xa4, 0x6f, 0xcb, 0x4f, 0x26, 0x21, 0x18, 0x8a, 0x3a, 0x94, 0x02, 0xdc, 0xb0, 0xaf, 0xb9,
|
||||
0xe6, 0xdd, 0x6b, 0x51, 0xb0, 0xb8, 0x8c, 0xf3, 0x04, 0x5d, 0x9c, 0x4a, 0x83, 0x2d, 0x32, 0xb4,
|
||||
0x38, 0x25, 0x35, 0xc0, 0x6b, 0x73, 0xff, 0x63, 0x47, 0x64, 0x28, 0x98, 0xaf, 0x0a, 0xfd, 0x6f,
|
||||
0x1d, 0x1f, 0xf7, 0x20, 0xe1, 0x6b, 0xf3, 0x06, 0x30, 0x1b, 0xdf, 0xca, 0xe9, 0x67, 0x01, 0x53,
|
||||
0x3e, 0x1a, 0x5a, 0x08, 0xd3, 0x2a, 0xa0, 0x53, 0x9b, 0x04, 0x97, 0x89, 0x9f, 0xb0, 0x25, 0xd6,
|
||||
0xa9, 0x6d, 0x7e, 0x2a, 0x91, 0x50, 0xa7, 0x6e, 0xa3, 0x20, 0xcf, 0x74, 0xd7, 0x41, 0xab, 0x01,
|
||||
0x7d, 0x77, 0xe9, 0xb3, 0xd6, 0xc9, 0x81, 0x91, 0xb3, 0x97, 0x2e, 0xbc, 0xf7, 0x04, 0x48, 0x41,
|
||||
0xf7, 0xd2, 0x05, 0xfe, 0x9a, 0x60, 0xa3, 0x17, 0x0b, 0x5f, 0xc9, 0xc7, 0x82, 0xbd, 0x6b, 0xde,
|
||||
0x95, 0x23, 0xc5, 0x95, 0xf2, 0xd6, 0xcb, 0xf2, 0xf5, 0x6e, 0xd0, 0x1e, 0x80, 0x3d, 0x2d, 0x79,
|
||||
0xc2, 0xaa, 0x4a, 0xdf, 0xe5, 0xe7, 0x9f, 0x30, 0xd2, 0xb2, 0x21, 0xb8, 0xc9, 0xef, 0x61, 0x18,
|
||||
0xb2, 0x2d, 0xa3, 0x45, 0xf6, 0x06, 0x9a, 0x55, 0x54, 0xb3, 0x7d, 0xf9, 0xcc, 0x5a, 0x27, 0x67,
|
||||
0x87, 0x97, 0x96, 0xba, 0x57, 0xce, 0xac, 0xa3, 0xea, 0xd8, 0x6d, 0x33, 0x8f, 0x7b, 0x90, 0xda,
|
||||
0xd5, 0x57, 0xd1, 0xfb, 0x47, 0x7c, 0x32, 0x62, 0xf9, 0x78, 0xf0, 0x03, 0xff, 0x08, 0x2d, 0x9f,
|
||||
0x0c, 0xeb, 0x9f, 0x8d, 0xd1, 0x3b, 0x94, 0xd8, 0x1e, 0x02, 0xdc, 0x63, 0x97, 0xf3, 0xc9, 0x48,
|
||||
0xc4, 0x02, 0x1c, 0x02, 0x94, 0xbf, 0x0f, 0x6b, 0x01, 0x71, 0x08, 0xd0, 0x03, 0x80, 0xbd, 0xf3,
|
||||
0x92, 0x31, 0xd4, 0x5e, 0x2d, 0x08, 0xda, 0xd3, 0x80, 0xcd, 0x22, 0x8c, 0xbd, 0x3a, 0x51, 0x87,
|
||||
0x87, 0xf6, 0xac, 0x8e, 0x94, 0x12, 0x59, 0x44, 0x9b, 0xb2, 0x9d, 0x5b, 0x55, 0x5f, 0xde, 0x00,
|
||||
0x32, 0x9f, 0xcd, 0xe2, 0x72, 0x09, 0x3a, 0xb7, 0xae, 0xa5, 0x03, 0x10, 0x9d, 0x1b, 0x05, 0xed,
|
||||
0xa8, 0x6d, 0x1e, 0x73, 0x72, 0x7d, 0xc0, 0x4b, 0x3e, 0x17, 0x69, 0xce, 0xe0, 0x2d, 0x10, 0xe6,
|
||||
0x81, 0xba, 0x0c, 0x31, 0x6a, 0x29, 0xd6, 0x66, 0xb9, 0x92, 0x50, 0xe7, 0x09, 0xe5, 0x0d, 0xbf,
|
||||
0x95, 0xe0, 0x25, 0x7c, 0x9f, 0xa8, 0xac, 0x40, 0x88, 0xc8, 0x72, 0x49, 0x18, 0xb4, 0xfd, 0x69,
|
||||
0x9a, 0x4f, 0xd0, 0xb6, 0x3f, 0x75, 0xef, 0xc7, 0xbc, 0x47, 0x03, 0x76, 0x40, 0xa9, 0x87, 0xa6,
|
||||
0x06, 0x80, 0xfe, 0x96, 0x13, 0x7d, 0xe8, 0x2e, 0x41, 0x0c, 0x28, 0x9c, 0x04, 0xae, 0x5e, 0x17,
|
||||
0x2c, 0x67, 0xe3, 0xe6, 0xd4, 0x1c, 0xe6, 0xca, 0x23, 0x82, 0xae, 0x20, 0x69, 0x63, 0x91, 0x94,
|
||||
0x9f, 0xcd, 0xf3, 0xd3, 0x92, 0x5f, 0xa5, 0x19, 0x2b, 0x41, 0x2c, 0x52, 0xea, 0x8e, 0x9c, 0x88,
|
||||
0x45, 0x18, 0x67, 0x8f, 0x5f, 0x48, 0xa9, 0x77, 0x4d, 0xf5, 0x79, 0x19, 0x27, 0xf0, 0xf8, 0x85,
|
||||
0xb2, 0xd1, 0xc6, 0x88, 0x9d, 0xc1, 0x00, 0xee, 0x24, 0x3a, 0xca, 0x75, 0xbe, 0x94, 0xfd, 0x43,
|
||||
0x7f, 0x4b, 0x28, 0x6f, 0x8d, 0xac, 0x40, 0xa2, 0xa3, 0xcd, 0x61, 0x24, 0x91, 0xe8, 0x84, 0x35,
|
||||
0xec, 0x54, 0x22, 0xb9, 0x13, 0x7d, 0xac, 0x08, 0x4c, 0x25, 0xca, 0x46, 0x23, 0x24, 0xa6, 0x92,
|
||||
0x16, 0x04, 0x02, 0x52, 0x33, 0x0c, 0x26, 0x68, 0x40, 0x32, 0xd2, 0x60, 0x40, 0x72, 0x29, 0x1b,
|
||||
0x28, 0x0e, 0xf3, 0x54, 0xa4, 0x71, 0x36, 0x62, 0xe2, 0x34, 0x2e, 0xe3, 0x19, 0x13, 0xac, 0x84,
|
||||
0x81, 0x42, 0x23, 0x43, 0x8f, 0x21, 0x02, 0x05, 0xc5, 0x6a, 0x87, 0x7f, 0x10, 0x7d, 0x58, 0xcf,
|
||||
0xfb, 0x2c, 0xd7, 0x7f, 0x90, 0xe2, 0x95, 0xfc, 0x4b, 0x36, 0x83, 0x8f, 0x8c, 0x8d, 0x91, 0x28,
|
||||
0x59, 0x3c, 0x6b, 0x6c, 0x7f, 0x60, 0x7e, 0x97, 0xe0, 0xd3, 0x95, 0xba, 0x3f, 0x9f, 0x70, 0x91,
|
||||
0x5e, 0xd5, 0xcb, 0x6c, 0xfd, 0x05, 0x11, 0xe8, 0xcf, 0xae, 0x78, 0x18, 0xb8, 0x17, 0x02, 0xe3,
|
||||
0x6c, 0x9c, 0x76, 0xa5, 0x67, 0xac, 0xc8, 0x60, 0x9c, 0xf6, 0xb4, 0x25, 0x40, 0xc4, 0x69, 0x14,
|
||||
0xb4, 0x83, 0xd3, 0x15, 0x9f, 0xb3, 0x70, 0x65, 0xce, 0x59, 0xbf, 0xca, 0x9c, 0x7b, 0x1f, 0x65,
|
||||
0x64, 0xd1, 0x87, 0xc7, 0x6c, 0x76, 0xc9, 0xca, 0x6a, 0x9a, 0x16, 0x07, 0x75, 0xc2, 0x15, 0x8b,
|
||||
0x39, 0xfc, 0x6c, 0xd1, 0x12, 0x43, 0x83, 0x10, 0x59, 0x29, 0x81, 0xda, 0x99, 0xc0, 0x02, 0x87,
|
||||
0xd5, 0x49, 0x3c, 0x63, 0xf2, 0x96, 0x0b, 0x30, 0x13, 0x38, 0x46, 0x1c, 0x88, 0x98, 0x09, 0x48,
|
||||
0xd8, 0xf9, 0xbe, 0xcb, 0x32, 0x67, 0x6c, 0x52, 0xf7, 0xb0, 0xf2, 0x34, 0x5e, 0xce, 0x58, 0x2e,
|
||||
0xb4, 0x49, 0xb0, 0x27, 0xef, 0x98, 0xc4, 0x79, 0x62, 0x4f, 0xbe, 0x8f, 0x9e, 0x13, 0x9a, 0xbc,
|
||||
0x07, 0x7f, 0xca, 0x4b, 0xa1, 0xfe, 0xdc, 0xcc, 0x45, 0x99, 0x81, 0xd0, 0xe4, 0x3f, 0x54, 0x8f,
|
||||
0x24, 0x42, 0x53, 0x58, 0xc3, 0xb9, 0xa7, 0xdd, 0x2b, 0xc3, 0x1b, 0x56, 0x9a, 0x7e, 0xf2, 0x6a,
|
||||
0x16, 0xa7, 0x99, 0xee, 0x0d, 0x3f, 0x0a, 0xd8, 0x26, 0x74, 0x88, 0x7b, 0xda, 0xfb, 0xea, 0x3a,
|
||||
0x37, 0xdb, 0x87, 0x4b, 0x08, 0x5e, 0x11, 0x74, 0xd8, 0x27, 0x5e, 0x11, 0x74, 0x6b, 0xd9, 0x95,
|
||||
0xbb, 0x65, 0x25, 0xb7, 0x94, 0xc4, 0x2e, 0x1f, 0xc3, 0xfd, 0x42, 0xc7, 0x26, 0x00, 0x89, 0x95,
|
||||
0x7b, 0x50, 0xc1, 0xa6, 0x06, 0x16, 0xdb, 0x4f, 0xf3, 0x38, 0x4b, 0x7f, 0x06, 0xd3, 0x7a, 0xc7,
|
||||
0x4e, 0x43, 0x10, 0xa9, 0x01, 0x4e, 0x62, 0xae, 0x0e, 0x98, 0x38, 0x4f, 0xeb, 0xd0, 0xbf, 0x1e,
|
||||
0x78, 0x6e, 0x92, 0xe8, 0x76, 0xe5, 0x90, 0xce, 0x7d, 0xa9, 0xf0, 0xb1, 0xee, 0x14, 0xc5, 0xa8,
|
||||
0x9e, 0x55, 0xcf, 0x58, 0xc2, 0xd2, 0x42, 0x0c, 0x5e, 0x84, 0x9f, 0x15, 0xc0, 0x89, 0x83, 0x16,
|
||||
0x3d, 0xd4, 0x9c, 0xd7, 0xf7, 0x75, 0x2c, 0x19, 0xa9, 0xbf, 0xc3, 0x76, 0x51, 0xb1, 0x52, 0x27,
|
||||
0x1a, 0x07, 0x4c, 0x80, 0xd1, 0xe9, 0x70, 0x43, 0x07, 0xac, 0x2b, 0x4a, 0x8c, 0xce, 0xb0, 0x86,
|
||||
0xdd, 0xec, 0x73, 0xb8, 0x33, 0x56, 0xf1, 0x6c, 0xc1, 0xe4, 0x79, 0xc3, 0x4d, 0xd2, 0x98, 0x43,
|
||||
0x11, 0x9b, 0x7d, 0x34, 0x6d, 0xb3, 0xb5, 0xb6, 0xdb, 0x9d, 0x7c, 0x79, 0x08, 0x8f, 0x4c, 0x20,
|
||||
0x96, 0x24, 0x46, 0x64, 0x6b, 0x01, 0xdc, 0xd9, 0x0c, 0x2f, 0x79, 0x3c, 0x4e, 0xe2, 0x4a, 0x9c,
|
||||
0xc6, 0xcb, 0x8c, 0xc7, 0x63, 0x39, 0xaf, 0xc3, 0xcd, 0xf0, 0x86, 0x19, 0xba, 0x10, 0xb5, 0x19,
|
||||
0x4e, 0xc1, 0x6e, 0x76, 0x26, 0xff, 0xbc, 0x9c, 0x3e, 0xcb, 0x09, 0xb3, 0x33, 0x59, 0x5e, 0x78,
|
||||
0x8e, 0xf3, 0x61, 0x18, 0xb2, 0xdf, 0xa0, 0x29, 0x91, 0x4c, 0x43, 0xee, 0x61, 0x3a, 0x5e, 0x02,
|
||||
0x72, 0x3f, 0x40, 0xd8, 0x7b, 0x29, 0xd4, 0xef, 0xcd, 0x5f, 0x48, 0x11, 0xfa, 0x56, 0xe9, 0x4d,
|
||||
0x4c, 0xd7, 0x85, 0x86, 0xee, 0x65, 0x73, 0x5b, 0x3d, 0x69, 0x9b, 0x66, 0xee, 0x4e, 0x63, 0xb1,
|
||||
0x33, 0x1e, 0x1f, 0xb3, 0x0a, 0xf9, 0xa0, 0xbc, 0x16, 0x0e, 0xad, 0x94, 0x48, 0x33, 0xdb, 0x94,
|
||||
0xed, 0xe8, 0xb5, 0xec, 0xd5, 0x38, 0x15, 0x5a, 0xd6, 0x9c, 0x90, 0xde, 0x6c, 0x1b, 0x68, 0x53,
|
||||
0x44, 0xad, 0x68, 0xda, 0xc6, 0xf2, 0x9a, 0x39, 0xe7, 0x93, 0x49, 0xc6, 0x34, 0x74, 0xc6, 0x62,
|
||||
0x75, 0xa9, 0xde, 0x76, 0xdb, 0x16, 0x0a, 0x12, 0xb1, 0x3c, 0xa8, 0x60, 0xd3, 0xc8, 0x1a, 0x53,
|
||||
0xaf, 0xa4, 0x9a, 0x07, 0xbb, 0xd6, 0x36, 0xe3, 0x01, 0x44, 0x1a, 0x89, 0x82, 0xf6, 0xbb, 0xb7,
|
||||
0x5a, 0x7c, 0xc0, 0x9a, 0x27, 0x01, 0xaf, 0x20, 0x92, 0xca, 0x8e, 0x98, 0xf8, 0xee, 0x0d, 0xc1,
|
||||
0xec, 0x3a, 0x01, 0x78, 0x78, 0xb9, 0x3c, 0x1c, 0xc3, 0x75, 0x02, 0xd4, 0x97, 0x0c, 0xb1, 0x4e,
|
||||
0xa0, 0x58, 0xbf, 0xe9, 0xcc, 0xbe, 0xd7, 0x51, 0x5c, 0xd9, 0xca, 0x21, 0x4d, 0x87, 0x82, 0xa1,
|
||||
0xa6, 0xa3, 0x14, 0xfc, 0x47, 0xea, 0x6e, 0xad, 0x21, 0x8f, 0x14, 0xdb, 0x57, 0x5b, 0xed, 0xc2,
|
||||
0xe0, 0x65, 0xfe, 0x7a, 0xe0, 0xe1, 0x97, 0xf9, 0x6b, 0x61, 0xf8, 0x32, 0x7f, 0x0b, 0x29, 0xdb,
|
||||
0x2f, 0xef, 0xff, 0xd7, 0x37, 0x77, 0x56, 0x7e, 0xf9, 0xcd, 0x9d, 0x95, 0xff, 0xf9, 0xe6, 0xce,
|
||||
0xca, 0x2f, 0xbe, 0xbd, 0xf3, 0xde, 0x2f, 0xbf, 0xbd, 0xf3, 0xde, 0x7f, 0x7f, 0x7b, 0xe7, 0xbd,
|
||||
0xaf, 0xdf, 0xd7, 0x7f, 0x76, 0xf4, 0xf2, 0xff, 0xc9, 0x3f, 0x1e, 0xfa, 0xfc, 0xff, 0x02, 0x00,
|
||||
0x00, 0xff, 0xff, 0x13, 0x78, 0x6c, 0x1f, 0x9a, 0x74, 0x00, 0x00,
|
||||
}
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
|
@ -619,6 +623,8 @@ type ClientCommandsHandler interface {
|
|||
BlockDivListSetStyle(context.Context, *pb.RpcBlockDivListSetStyleRequest) *pb.RpcBlockDivListSetStyleResponse
|
||||
BlockLatexSetText(context.Context, *pb.RpcBlockLatexSetTextRequest) *pb.RpcBlockLatexSetTextResponse
|
||||
ProcessCancel(context.Context, *pb.RpcProcessCancelRequest) *pb.RpcProcessCancelResponse
|
||||
ProcessSubscribe(context.Context, *pb.RpcProcessSubscribeRequest) *pb.RpcProcessSubscribeResponse
|
||||
ProcessUnsubscribe(context.Context, *pb.RpcProcessUnsubscribeRequest) *pb.RpcProcessUnsubscribeResponse
|
||||
LogSend(context.Context, *pb.RpcLogSendRequest) *pb.RpcLogSendResponse
|
||||
DebugStat(context.Context, *pb.RpcDebugStatRequest) *pb.RpcDebugStatResponse
|
||||
DebugTree(context.Context, *pb.RpcDebugTreeRequest) *pb.RpcDebugTreeResponse
|
||||
|
@ -632,7 +638,9 @@ type ClientCommandsHandler interface {
|
|||
DebugRunProfiler(context.Context, *pb.RpcDebugRunProfilerRequest) *pb.RpcDebugRunProfilerResponse
|
||||
DebugAccountSelectTrace(context.Context, *pb.RpcDebugAccountSelectTraceRequest) *pb.RpcDebugAccountSelectTraceResponse
|
||||
DebugAnystoreObjectChanges(context.Context, *pb.RpcDebugAnystoreObjectChangesRequest) *pb.RpcDebugAnystoreObjectChangesResponse
|
||||
MetricsSetParameters(context.Context, *pb.RpcMetricsSetParametersRequest) *pb.RpcMetricsSetParametersResponse
|
||||
DebugNetCheck(context.Context, *pb.RpcDebugNetCheckRequest) *pb.RpcDebugNetCheckResponse
|
||||
DebugExportLog(context.Context, *pb.RpcDebugExportLogRequest) *pb.RpcDebugExportLogResponse
|
||||
InitialSetParameters(context.Context, *pb.RpcInitialSetParametersRequest) *pb.RpcInitialSetParametersResponse
|
||||
// used only for lib-server via grpc
|
||||
// Streams not supported ### ListenSessionEvents(context.Context, *pb.StreamRequest)
|
||||
NotificationList(context.Context, *pb.RpcNotificationListRequest) *pb.RpcNotificationListResponse
|
||||
|
@ -5090,6 +5098,46 @@ func ProcessCancel(b []byte) (resp []byte) {
|
|||
return resp
|
||||
}
|
||||
|
||||
func ProcessSubscribe(b []byte) (resp []byte) {
|
||||
defer func() {
|
||||
if PanicHandler != nil {
|
||||
if r := recover(); r != nil {
|
||||
resp, _ = (&pb.RpcProcessSubscribeResponse{Error: &pb.RpcProcessSubscribeResponseError{Code: pb.RpcProcessSubscribeResponseError_UNKNOWN_ERROR, Description: "panic recovered"}}).Marshal()
|
||||
PanicHandler(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
in := new(pb.RpcProcessSubscribeRequest)
|
||||
if err := in.Unmarshal(b); err != nil {
|
||||
resp, _ = (&pb.RpcProcessSubscribeResponse{Error: &pb.RpcProcessSubscribeResponseError{Code: pb.RpcProcessSubscribeResponseError_BAD_INPUT, Description: err.Error()}}).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
resp, _ = clientCommandsHandler.ProcessSubscribe(context.Background(), in).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
func ProcessUnsubscribe(b []byte) (resp []byte) {
|
||||
defer func() {
|
||||
if PanicHandler != nil {
|
||||
if r := recover(); r != nil {
|
||||
resp, _ = (&pb.RpcProcessUnsubscribeResponse{Error: &pb.RpcProcessUnsubscribeResponseError{Code: pb.RpcProcessUnsubscribeResponseError_UNKNOWN_ERROR, Description: "panic recovered"}}).Marshal()
|
||||
PanicHandler(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
in := new(pb.RpcProcessUnsubscribeRequest)
|
||||
if err := in.Unmarshal(b); err != nil {
|
||||
resp, _ = (&pb.RpcProcessUnsubscribeResponse{Error: &pb.RpcProcessUnsubscribeResponseError{Code: pb.RpcProcessUnsubscribeResponseError_BAD_INPUT, Description: err.Error()}}).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
resp, _ = clientCommandsHandler.ProcessUnsubscribe(context.Background(), in).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
func LogSend(b []byte) (resp []byte) {
|
||||
defer func() {
|
||||
if PanicHandler != nil {
|
||||
|
@ -5350,23 +5398,63 @@ func DebugAnystoreObjectChanges(b []byte) (resp []byte) {
|
|||
return resp
|
||||
}
|
||||
|
||||
func MetricsSetParameters(b []byte) (resp []byte) {
|
||||
func DebugNetCheck(b []byte) (resp []byte) {
|
||||
defer func() {
|
||||
if PanicHandler != nil {
|
||||
if r := recover(); r != nil {
|
||||
resp, _ = (&pb.RpcMetricsSetParametersResponse{Error: &pb.RpcMetricsSetParametersResponseError{Code: pb.RpcMetricsSetParametersResponseError_UNKNOWN_ERROR, Description: "panic recovered"}}).Marshal()
|
||||
resp, _ = (&pb.RpcDebugNetCheckResponse{Error: &pb.RpcDebugNetCheckResponseError{Code: pb.RpcDebugNetCheckResponseError_UNKNOWN_ERROR, Description: "panic recovered"}}).Marshal()
|
||||
PanicHandler(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
in := new(pb.RpcMetricsSetParametersRequest)
|
||||
in := new(pb.RpcDebugNetCheckRequest)
|
||||
if err := in.Unmarshal(b); err != nil {
|
||||
resp, _ = (&pb.RpcMetricsSetParametersResponse{Error: &pb.RpcMetricsSetParametersResponseError{Code: pb.RpcMetricsSetParametersResponseError_BAD_INPUT, Description: err.Error()}}).Marshal()
|
||||
resp, _ = (&pb.RpcDebugNetCheckResponse{Error: &pb.RpcDebugNetCheckResponseError{Code: pb.RpcDebugNetCheckResponseError_BAD_INPUT, Description: err.Error()}}).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
resp, _ = clientCommandsHandler.MetricsSetParameters(context.Background(), in).Marshal()
|
||||
resp, _ = clientCommandsHandler.DebugNetCheck(context.Background(), in).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
func DebugExportLog(b []byte) (resp []byte) {
|
||||
defer func() {
|
||||
if PanicHandler != nil {
|
||||
if r := recover(); r != nil {
|
||||
resp, _ = (&pb.RpcDebugExportLogResponse{Error: &pb.RpcDebugExportLogResponseError{Code: pb.RpcDebugExportLogResponseError_UNKNOWN_ERROR, Description: "panic recovered"}}).Marshal()
|
||||
PanicHandler(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
in := new(pb.RpcDebugExportLogRequest)
|
||||
if err := in.Unmarshal(b); err != nil {
|
||||
resp, _ = (&pb.RpcDebugExportLogResponse{Error: &pb.RpcDebugExportLogResponseError{Code: pb.RpcDebugExportLogResponseError_BAD_INPUT, Description: err.Error()}}).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
resp, _ = clientCommandsHandler.DebugExportLog(context.Background(), in).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
func InitialSetParameters(b []byte) (resp []byte) {
|
||||
defer func() {
|
||||
if PanicHandler != nil {
|
||||
if r := recover(); r != nil {
|
||||
resp, _ = (&pb.RpcInitialSetParametersResponse{Error: &pb.RpcInitialSetParametersResponseError{Code: pb.RpcInitialSetParametersResponseError_UNKNOWN_ERROR, Description: "panic recovered"}}).Marshal()
|
||||
PanicHandler(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
in := new(pb.RpcInitialSetParametersRequest)
|
||||
if err := in.Unmarshal(b); err != nil {
|
||||
resp, _ = (&pb.RpcInitialSetParametersResponse{Error: &pb.RpcInitialSetParametersResponseError{Code: pb.RpcInitialSetParametersResponseError_BAD_INPUT, Description: err.Error()}}).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
resp, _ = clientCommandsHandler.InitialSetParameters(context.Background(), in).Marshal()
|
||||
return resp
|
||||
}
|
||||
|
||||
|
@ -6396,6 +6484,10 @@ func CommandAsync(cmd string, data []byte, callback func(data []byte)) {
|
|||
cd = BlockLatexSetText(data)
|
||||
case "ProcessCancel":
|
||||
cd = ProcessCancel(data)
|
||||
case "ProcessSubscribe":
|
||||
cd = ProcessSubscribe(data)
|
||||
case "ProcessUnsubscribe":
|
||||
cd = ProcessUnsubscribe(data)
|
||||
case "LogSend":
|
||||
cd = LogSend(data)
|
||||
case "DebugStat":
|
||||
|
@ -6422,8 +6514,12 @@ func CommandAsync(cmd string, data []byte, callback func(data []byte)) {
|
|||
cd = DebugAccountSelectTrace(data)
|
||||
case "DebugAnystoreObjectChanges":
|
||||
cd = DebugAnystoreObjectChanges(data)
|
||||
case "MetricsSetParameters":
|
||||
cd = MetricsSetParameters(data)
|
||||
case "DebugNetCheck":
|
||||
cd = DebugNetCheck(data)
|
||||
case "DebugExportLog":
|
||||
cd = DebugExportLog(data)
|
||||
case "InitialSetParameters":
|
||||
cd = InitialSetParameters(data)
|
||||
case "NotificationList":
|
||||
cd = NotificationList(data)
|
||||
case "NotificationReply":
@ -9584,6 +9680,34 @@ func (h *ClientCommandsHandlerProxy) ProcessCancel(ctx context.Context, req *pb.
|
|||
call, _ := actualCall(ctx, req)
|
||||
return call.(*pb.RpcProcessCancelResponse)
|
||||
}
|
||||
func (h *ClientCommandsHandlerProxy) ProcessSubscribe(ctx context.Context, req *pb.RpcProcessSubscribeRequest) *pb.RpcProcessSubscribeResponse {
actualCall := func(ctx context.Context, req any) (any, error) {
return h.client.ProcessSubscribe(ctx, req.(*pb.RpcProcessSubscribeRequest)), nil
}
for _, interceptor := range h.interceptors {
toCall := actualCall
currentInterceptor := interceptor
actualCall = func(ctx context.Context, req any) (any, error) {
return currentInterceptor(ctx, req, "ProcessSubscribe", toCall)
}
}
call, _ := actualCall(ctx, req)
return call.(*pb.RpcProcessSubscribeResponse)
}
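
Every proxy method repeats the same wrapping loop: the innermost actualCall hits the client, and each registered interceptor is layered around it with the method name passed through, so a single interceptor can observe every command. A minimal sketch of an interceptor matching that call shape; the package name is illustrative, and how entries get appended to h.interceptors is not shown in this diff, so treat the wiring as an assumption:

package clientcommands_sketch // illustrative name, not part of the repository

import (
	"context"
	"log"
	"time"
)

// loggingInterceptor matches the shape used by the chaining loop above:
// it receives the request, the method name and the next call in the chain,
// and must return whatever that call returns.
func loggingInterceptor(
	ctx context.Context,
	req any,
	methodName string,
	next func(ctx context.Context, req any) (any, error),
) (any, error) {
	start := time.Now()
	resp, err := next(ctx, req)
	log.Printf("%s took %s", methodName, time.Since(start))
	return resp, err
}

Because each loop iteration wraps the previous actualCall, the interceptor appended last ends up outermost and runs first.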
func (h *ClientCommandsHandlerProxy) ProcessUnsubscribe(ctx context.Context, req *pb.RpcProcessUnsubscribeRequest) *pb.RpcProcessUnsubscribeResponse {
|
||||
actualCall := func(ctx context.Context, req any) (any, error) {
|
||||
return h.client.ProcessUnsubscribe(ctx, req.(*pb.RpcProcessUnsubscribeRequest)), nil
|
||||
}
|
||||
for _, interceptor := range h.interceptors {
|
||||
toCall := actualCall
|
||||
currentInterceptor := interceptor
|
||||
actualCall = func(ctx context.Context, req any) (any, error) {
|
||||
return currentInterceptor(ctx, req, "ProcessUnsubscribe", toCall)
|
||||
}
|
||||
}
|
||||
call, _ := actualCall(ctx, req)
|
||||
return call.(*pb.RpcProcessUnsubscribeResponse)
|
||||
}
|
||||
func (h *ClientCommandsHandlerProxy) LogSend(ctx context.Context, req *pb.RpcLogSendRequest) *pb.RpcLogSendResponse {
|
||||
actualCall := func(ctx context.Context, req any) (any, error) {
|
||||
return h.client.LogSend(ctx, req.(*pb.RpcLogSendRequest)), nil
|
||||
|
@ -9766,19 +9890,47 @@ func (h *ClientCommandsHandlerProxy) DebugAnystoreObjectChanges(ctx context.Cont
|
|||
call, _ := actualCall(ctx, req)
|
||||
return call.(*pb.RpcDebugAnystoreObjectChangesResponse)
|
||||
}
|
||||
func (h *ClientCommandsHandlerProxy) MetricsSetParameters(ctx context.Context, req *pb.RpcMetricsSetParametersRequest) *pb.RpcMetricsSetParametersResponse {
|
||||
func (h *ClientCommandsHandlerProxy) DebugNetCheck(ctx context.Context, req *pb.RpcDebugNetCheckRequest) *pb.RpcDebugNetCheckResponse {
|
||||
actualCall := func(ctx context.Context, req any) (any, error) {
|
||||
return h.client.MetricsSetParameters(ctx, req.(*pb.RpcMetricsSetParametersRequest)), nil
|
||||
return h.client.DebugNetCheck(ctx, req.(*pb.RpcDebugNetCheckRequest)), nil
|
||||
}
|
||||
for _, interceptor := range h.interceptors {
|
||||
toCall := actualCall
|
||||
currentInterceptor := interceptor
|
||||
actualCall = func(ctx context.Context, req any) (any, error) {
|
||||
return currentInterceptor(ctx, req, "MetricsSetParameters", toCall)
|
||||
return currentInterceptor(ctx, req, "DebugNetCheck", toCall)
|
||||
}
|
||||
}
|
||||
call, _ := actualCall(ctx, req)
|
||||
return call.(*pb.RpcMetricsSetParametersResponse)
|
||||
return call.(*pb.RpcDebugNetCheckResponse)
|
||||
}
|
||||
func (h *ClientCommandsHandlerProxy) DebugExportLog(ctx context.Context, req *pb.RpcDebugExportLogRequest) *pb.RpcDebugExportLogResponse {
|
||||
actualCall := func(ctx context.Context, req any) (any, error) {
|
||||
return h.client.DebugExportLog(ctx, req.(*pb.RpcDebugExportLogRequest)), nil
|
||||
}
|
||||
for _, interceptor := range h.interceptors {
|
||||
toCall := actualCall
|
||||
currentInterceptor := interceptor
|
||||
actualCall = func(ctx context.Context, req any) (any, error) {
|
||||
return currentInterceptor(ctx, req, "DebugExportLog", toCall)
|
||||
}
|
||||
}
|
||||
call, _ := actualCall(ctx, req)
|
||||
return call.(*pb.RpcDebugExportLogResponse)
|
||||
}
|
||||
func (h *ClientCommandsHandlerProxy) InitialSetParameters(ctx context.Context, req *pb.RpcInitialSetParametersRequest) *pb.RpcInitialSetParametersResponse {
|
||||
actualCall := func(ctx context.Context, req any) (any, error) {
|
||||
return h.client.InitialSetParameters(ctx, req.(*pb.RpcInitialSetParametersRequest)), nil
|
||||
}
|
||||
for _, interceptor := range h.interceptors {
|
||||
toCall := actualCall
|
||||
currentInterceptor := interceptor
|
||||
actualCall = func(ctx context.Context, req any) (any, error) {
|
||||
return currentInterceptor(ctx, req, "InitialSetParameters", toCall)
|
||||
}
|
||||
}
|
||||
call, _ := actualCall(ctx, req)
|
||||
return call.(*pb.RpcInitialSetParametersResponse)
|
||||
}
|
||||
func (h *ClientCommandsHandlerProxy) NotificationList(ctx context.Context, req *pb.RpcNotificationListRequest) *pb.RpcNotificationListResponse {
|
||||
actualCall := func(ctx context.Context, req any) (any, error) {
|
||||
|
|
|
@ -121,6 +121,18 @@ func handleZip(input, output string) {
|
|||
defer r.Close()
|
||||
|
||||
for _, f := range r.File {
dir := filepath.Dir(f.Name)
if dir != "." {
// nolint: gosec
outputDir := filepath.Join(output, dir)
if _, err := os.Stat(outputDir); os.IsNotExist(err) {
if err := os.MkdirAll(outputDir, 0755); err != nil {
log.Printf("Failed to create output subdirectory: %v\n", err)
return
}
}
}

// assuming we are only working with files, not directories
if f.FileInfo().IsDir() {
continue
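
The nolint: gosec annotation above acknowledges the zip-slip warning on the Join. If the archives ever come from untrusted sources, a small guard like the following keeps every extracted path inside the output directory; safeJoin is a hypothetical helper, not part of this change, and assumes the file already imports filepath, os, strings and fmt:

// safeJoin is a hypothetical helper: it joins name under base and rejects
// entries whose ".." segments would escape base (the classic zip-slip case).
func safeJoin(base, name string) (string, error) {
	p := filepath.Join(base, name)
	if p != filepath.Clean(base) && !strings.HasPrefix(p, filepath.Clean(base)+string(os.PathSeparator)) {
		return "", fmt.Errorf("zip entry %q escapes %q", name, base)
	}
	return p, nil
}

Running the directory name (and the file path itself) through such a guard before MkdirAll would let it replace the nolint comment.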
@ -3,6 +3,7 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
|
@ -142,8 +143,15 @@ func main() {
|
|||
if err != nil {
|
||||
log.Fatal("can't create temp file:", err)
|
||||
}
|
||||
g := graphviz.New()
|
||||
g.Render(gvo, graphviz.SVG, tf)
|
||||
ctx := context.Background()
|
||||
g, err := graphviz.New(ctx)
|
||||
if err != nil {
|
||||
log.Fatal("can't open graphviz:", err)
|
||||
}
|
||||
err = g.Render(ctx, gvo, graphviz.SVG, tf)
|
||||
if err != nil {
|
||||
log.Fatal("can't render graphviz:", err)
|
||||
}
|
||||
fmt.Println("tree file:", tf.Name())
|
||||
tf.Close()
|
||||
open(tf.Name())
|
||||
|
|
|
@ -11,6 +11,7 @@ import (
|
|||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/gogo/protobuf/jsonpb"
|
||||
|
@ -207,7 +208,7 @@ func collectUseCaseInfo(files []*zip.File, fileName string) (info *useCaseInfo,
|
|||
continue
|
||||
}
|
||||
|
||||
if strings.HasPrefix(f.Name, "files") {
|
||||
if strings.HasPrefix(f.Name, "files") || f.FileInfo().IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
|
@ -296,14 +297,21 @@ func processFiles(files []*zip.File, zw *zip.Writer, info *useCaseInfo, flags *c
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newData, err := processRawData(data, f.Name, info, flags)
|
||||
if err != nil {
|
||||
if !(flags.exclude && errors.Is(err, errValidationFailed)) {
|
||||
// just do not include objects that failed validation
|
||||
incorrectFileFound = true
|
||||
|
||||
var newData []byte
|
||||
if f.FileInfo().IsDir() {
|
||||
newData = data
|
||||
} else {
|
||||
newData, err = processRawData(data, f.Name, info, flags)
|
||||
if err != nil {
|
||||
if !(flags.exclude && errors.Is(err, errValidationFailed)) {
|
||||
// just do not include objects that failed validation
|
||||
incorrectFileFound = true
|
||||
}
|
||||
continue
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if newData == nil || !writeNewFile {
|
||||
continue
|
||||
}
|
||||
|
@ -464,7 +472,11 @@ func removeAccountRelatedDetails(s *pb.ChangeSnapshot) {
|
|||
bundle.RelationKeyLinks.String(),
|
||||
bundle.RelationKeyBacklinks.String(),
|
||||
bundle.RelationKeyWorkspaceId.String(),
|
||||
bundle.RelationKeyIdentityProfileLink.String():
|
||||
bundle.RelationKeyIdentityProfileLink.String(),
|
||||
bundle.RelationKeyAddedDate.String(),
|
||||
bundle.RelationKeySyncDate.String(),
|
||||
bundle.RelationKeySyncError.String(),
|
||||
bundle.RelationKeySyncStatus.String():
|
||||
|
||||
delete(s.Data.Details.Fields, key)
|
||||
}
|
||||
|
@ -492,7 +504,7 @@ func processProfile(data []byte, info *useCaseInfo, spaceDashboardId string) ([]
|
|||
}
|
||||
|
||||
fmt.Println("spaceDashboardId = " + profile.SpaceDashboardId)
|
||||
if _, found := info.objects[profile.SpaceDashboardId]; !found {
|
||||
if _, found := info.objects[profile.SpaceDashboardId]; !found && !slices.Contains([]string{"lastOpened"}, profile.SpaceDashboardId) {
|
||||
err := fmt.Errorf("failed to find Space Dashboard object '%s' among provided", profile.SpaceDashboardId)
|
||||
fmt.Println(err)
|
||||
return nil, err
|
||||
|
|
|
@ -164,6 +164,7 @@ func (s *service) GetSpaceInfo(ctx context.Context, spaceId string) (*model.Acco
|
|||
getInfo.AccountSpaceId = spaceId
|
||||
getInfo.SpaceViewId = spaceViewId
|
||||
getInfo.HomeObjectId = ids.Home
|
||||
getInfo.WorkspaceObjectId = ids.Workspace
|
||||
getInfo.WidgetsId = ids.Widgets
|
||||
getInfo.ArchiveObjectId = ids.Archive
|
||||
return getInfo, nil
|
||||
|
|
|
@ -134,6 +134,7 @@ func (s *Service) setAccountAndProfileDetails(ctx context.Context, req *pb.RpcAc
|
|||
RpcFileUploadRequest: pb.RpcFileUploadRequest{
|
||||
LocalPath: req.GetAvatarLocalPath(),
|
||||
Type: model.BlockContentFile_Image,
|
||||
ImageKind: model.ImageKind_Icon,
|
||||
},
|
||||
ObjectOrigin: objectorigin.None(),
|
||||
})
|
||||
|
|
|
@ -19,6 +19,8 @@ import (
|
|||
"github.com/anyproto/anytype-heart/util/debug"
|
||||
)
|
||||
|
||||
var ErrNoFolder = fmt.Errorf("no folder provided")
|
||||
|
||||
func (s *Service) RunProfiler(ctx context.Context, seconds int) (string, error) {
|
||||
// Start
|
||||
inFlightTraceBuf, err := s.traceRecorder.stopAndGetInFlightTrace()
|
||||
|
@ -115,6 +117,32 @@ func (s *Service) SaveLoginTrace(dir string) (string, error) {
|
|||
return s.traceRecorder.save(dir)
|
||||
}
|
||||
|
||||
// empty dir means use system temp dir
|
||||
func (s *Service) SaveLog(srcPath, destDir string) (string, error) {
|
||||
if srcPath == "" {
|
||||
return "", ErrNoFolder
|
||||
}
|
||||
targetFile, err := os.CreateTemp(destDir, "anytype-log-*.zip")
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("create temp file: %w", err)
|
||||
}
|
||||
|
||||
file, err := os.Open(srcPath)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to open source file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
err = createZipArchive(targetFile, []zipFile{
|
||||
{name: "anytype.log", data: file},
|
||||
})
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to create zip archive: %w", err)
|
||||
}
|
||||
|
||||
return targetFile.Name(), targetFile.Close()
|
||||
}
|
||||
|
||||
// traceRecorder is a helper to start and stop flight trace recorder
|
||||
type traceRecorder struct {
|
||||
lock sync.Mutex
|
||||
|
|
|
@ -200,8 +200,12 @@ func (mw *Middleware) BlockPaste(cctx context.Context, req *pb.RpcBlockPasteRequ
|
|||
log.Debug("Image requests to upload after paste:", uploadArr)
|
||||
for _, r := range uploadArr {
|
||||
r.ContextId = req.ContextId
|
||||
req := block.UploadRequest{ObjectOrigin: objectorigin.Clipboard(), RpcBlockUploadRequest: r}
|
||||
if err = bs.UploadBlockFile(nil, req, groupId); err != nil {
|
||||
req := block.UploadRequest{
|
||||
RpcBlockUploadRequest: r,
|
||||
ObjectOrigin: objectorigin.Clipboard(),
|
||||
ImageKind: model.ImageKind_AutomaticallyAdded,
|
||||
}
|
||||
if _, err = bs.UploadBlockFile(ctx, req, groupId, false); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
@ -309,7 +313,8 @@ func (mw *Middleware) BlockUpload(cctx context.Context, req *pb.RpcBlockUploadRe
|
|||
}
|
||||
err := mw.doBlockService(func(bs *block.Service) (err error) {
|
||||
req := block.UploadRequest{RpcBlockUploadRequest: *req, ObjectOrigin: objectorigin.None()}
|
||||
return bs.UploadBlockFile(nil, req, "")
|
||||
_, err = bs.UploadBlockFile(ctx, req, "", false)
|
||||
return err
|
||||
})
|
||||
if err != nil {
|
||||
return response(pb.RpcBlockUploadResponseError_UNKNOWN_ERROR, err)
|
||||
|
|
|
@ -26,7 +26,11 @@ import (
|
|||
"github.com/anyproto/anytype-heart/util/slice"
|
||||
)
|
||||
|
||||
const CName = "backlinks-update-watcher"
|
||||
const (
|
||||
CName = "backlinks-update-watcher"
|
||||
|
||||
defaultAggregationInterval = time.Second * 5
|
||||
)
|
||||
|
||||
var log = logging.Logger(CName)
|
||||
|
||||
|
@ -39,53 +43,69 @@ type backLinksUpdate struct {
|
|||
removed []string
|
||||
}
|
||||
|
||||
type UpdateWatcher struct {
|
||||
type UpdateWatcher interface {
|
||||
app.ComponentRunnable
|
||||
|
||||
FlushUpdates()
|
||||
}
|
||||
|
||||
type watcher struct {
|
||||
updater backlinksUpdater
|
||||
store objectstore.ObjectStore
|
||||
resolver idresolver.Resolver
|
||||
spaceService space.Service
|
||||
|
||||
infoBatch *mb.MB
|
||||
infoBatch *mb.MB
|
||||
lock sync.Mutex
|
||||
accumulatedBacklinks map[string]*backLinksUpdate
|
||||
aggregationInterval time.Duration
|
||||
}
|
||||
|
||||
func New() app.Component {
|
||||
return &UpdateWatcher{}
|
||||
func New() UpdateWatcher {
|
||||
return &watcher{}
|
||||
}
|
||||
|
||||
func (uw *UpdateWatcher) Name() string {
|
||||
func (w *watcher) Name() string {
|
||||
return CName
|
||||
}
|
||||
|
||||
func (uw *UpdateWatcher) Init(a *app.App) error {
|
||||
uw.updater = app.MustComponent[backlinksUpdater](a)
|
||||
uw.store = app.MustComponent[objectstore.ObjectStore](a)
|
||||
uw.resolver = app.MustComponent[idresolver.Resolver](a)
|
||||
uw.spaceService = app.MustComponent[space.Service](a)
|
||||
uw.infoBatch = mb.New(0)
|
||||
func (w *watcher) Init(a *app.App) error {
|
||||
w.updater = app.MustComponent[backlinksUpdater](a)
|
||||
w.store = app.MustComponent[objectstore.ObjectStore](a)
|
||||
w.resolver = app.MustComponent[idresolver.Resolver](a)
|
||||
w.spaceService = app.MustComponent[space.Service](a)
|
||||
|
||||
w.infoBatch = mb.New(0)
|
||||
w.accumulatedBacklinks = make(map[string]*backLinksUpdate)
|
||||
w.aggregationInterval = defaultAggregationInterval
|
||||
return nil
|
||||
}
|
||||
|
||||
func (uw *UpdateWatcher) Close(context.Context) error {
|
||||
if err := uw.infoBatch.Close(); err != nil {
|
||||
func (w *watcher) Close(context.Context) error {
|
||||
if err := w.infoBatch.Close(); err != nil {
|
||||
log.Errorf("failed to close message batch: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (uw *UpdateWatcher) Run(context.Context) error {
|
||||
uw.updater.SubscribeLinksUpdate(func(info spaceindex.LinksUpdateInfo) {
|
||||
if err := uw.infoBatch.Add(info); err != nil {
|
||||
func (w *watcher) Run(context.Context) error {
|
||||
w.updater.SubscribeLinksUpdate(func(info spaceindex.LinksUpdateInfo) {
|
||||
if err := w.infoBatch.Add(info); err != nil {
|
||||
log.With("objectId", info.LinksFromId).Errorf("failed to add backlinks update info to message batch: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
go uw.backlinksUpdateHandler()
|
||||
go w.backlinksUpdateHandler()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *watcher) FlushUpdates() {
|
||||
w.lock.Lock()
|
||||
defer w.lock.Unlock()
|
||||
|
||||
w.updateAccumulatedBacklinks()
|
||||
}
|
||||
|
||||
func applyUpdates(m map[string]*backLinksUpdate, update spaceindex.LinksUpdateInfo) {
|
||||
if update.LinksFromId == "" {
|
||||
return
|
||||
|
@ -116,71 +136,69 @@ func applyUpdates(m map[string]*backLinksUpdate, update spaceindex.LinksUpdateIn
|
|||
}
|
||||
}
|
||||
|
||||
func (uw *UpdateWatcher) backlinksUpdateHandler() {
|
||||
func (w *watcher) backlinksUpdateHandler() {
|
||||
var (
|
||||
accumulatedBacklinks = make(map[string]*backLinksUpdate)
|
||||
l sync.Mutex
|
||||
lastReceivedUpdates time.Time
|
||||
closedCh = make(chan struct{})
|
||||
aggregationInterval = time.Second * 5
|
||||
lastReceivedUpdates time.Time
|
||||
closedCh = make(chan struct{})
|
||||
)
|
||||
defer close(closedCh)
|
||||
|
||||
go func() {
|
||||
process := func() {
|
||||
log.Debugf("updating backlinks for %d objects", len(accumulatedBacklinks))
|
||||
for id, updates := range accumulatedBacklinks {
|
||||
uw.updateBackLinksInObject(id, updates)
|
||||
}
|
||||
accumulatedBacklinks = make(map[string]*backLinksUpdate)
|
||||
}
|
||||
for {
|
||||
select {
|
||||
case <-closedCh:
|
||||
l.Lock()
|
||||
process()
|
||||
l.Unlock()
|
||||
w.lock.Lock()
|
||||
w.updateAccumulatedBacklinks()
|
||||
w.lock.Unlock()
|
||||
return
|
||||
case <-time.After(aggregationInterval):
|
||||
l.Lock()
|
||||
if time.Since(lastReceivedUpdates) < aggregationInterval || len(accumulatedBacklinks) == 0 {
|
||||
l.Unlock()
|
||||
case <-time.After(w.aggregationInterval):
|
||||
w.lock.Lock()
|
||||
if time.Since(lastReceivedUpdates) < w.aggregationInterval || len(w.accumulatedBacklinks) == 0 {
|
||||
w.lock.Unlock()
|
||||
continue
|
||||
}
|
||||
|
||||
process()
|
||||
l.Unlock()
|
||||
w.updateAccumulatedBacklinks()
|
||||
w.lock.Unlock()
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
for {
|
||||
msgs := uw.infoBatch.Wait()
|
||||
msgs := w.infoBatch.Wait()
|
||||
if len(msgs) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
l.Lock()
|
||||
w.lock.Lock()
|
||||
for _, msg := range msgs {
|
||||
info, ok := msg.(spaceindex.LinksUpdateInfo)
|
||||
if !ok || hasSelfLinks(info) {
|
||||
continue
|
||||
}
|
||||
|
||||
applyUpdates(accumulatedBacklinks, info)
|
||||
applyUpdates(w.accumulatedBacklinks, info)
|
||||
}
|
||||
lastReceivedUpdates = time.Now()
|
||||
l.Unlock()
|
||||
w.lock.Unlock()
|
||||
}
|
||||
}
|
||||
|
||||
func (uw *UpdateWatcher) updateBackLinksInObject(id string, backlinksUpdate *backLinksUpdate) {
|
||||
spaceId, err := uw.resolver.ResolveSpaceID(id)
|
||||
func (w *watcher) updateAccumulatedBacklinks() {
|
||||
log.Debugf("updating backlinks for %d objects", len(w.accumulatedBacklinks))
|
||||
for id, updates := range w.accumulatedBacklinks {
|
||||
w.updateBackLinksInObject(id, updates)
|
||||
}
|
||||
w.accumulatedBacklinks = make(map[string]*backLinksUpdate)
|
||||
}
|
||||
|
||||
func (w *watcher) updateBackLinksInObject(id string, backlinksUpdate *backLinksUpdate) {
|
||||
spaceId, err := w.resolver.ResolveSpaceID(id)
|
||||
if err != nil {
|
||||
log.With("objectId", id).Errorf("failed to resolve space id for object: %v", err)
|
||||
return
|
||||
}
|
||||
spc, err := uw.spaceService.Get(context.Background(), spaceId)
|
||||
spc, err := w.spaceService.Get(context.Background(), spaceId)
|
||||
if err != nil {
|
||||
log.With("objectId", id, "spaceId", spaceId).Errorf("failed to get space: %v", err)
|
||||
return
|
||||
|
@ -207,7 +225,7 @@ func (uw *UpdateWatcher) updateBackLinksInObject(id string, backlinksUpdate *bac
|
|||
}
|
||||
|
||||
err = spc.DoLockedIfNotExists(id, func() error {
|
||||
return uw.store.SpaceIndex(spaceId).ModifyObjectDetails(id, func(details *types.Struct) (*types.Struct, bool, error) {
|
||||
return w.store.SpaceIndex(spaceId).ModifyObjectDetails(id, func(details *types.Struct) (*types.Struct, bool, error) {
|
||||
return updateBacklinks(details, backlinksUpdate)
|
||||
})
|
||||
})
|
||||
|
|
177
core/block/backlinks/watcher_test.go
Normal file
|
@ -0,0 +1,177 @@
|
|||
package backlinks
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/anyproto/any-sync/app/ocache"
|
||||
"github.com/cheggaaa/mb"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/object/idresolver/mock_idresolver"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore/spaceindex"
|
||||
"github.com/anyproto/anytype-heart/space/clientspace/mock_clientspace"
|
||||
"github.com/anyproto/anytype-heart/space/mock_space"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
const spaceId = "spc1"
|
||||
|
||||
type testUpdater struct {
|
||||
callback func(info spaceindex.LinksUpdateInfo)
|
||||
runFunc func(callback func(info spaceindex.LinksUpdateInfo))
|
||||
}
|
||||
|
||||
func (u *testUpdater) SubscribeLinksUpdate(callback func(info spaceindex.LinksUpdateInfo)) {
|
||||
u.callback = callback
|
||||
}
|
||||
|
||||
func (u *testUpdater) start() {
|
||||
go u.runFunc(u.callback)
|
||||
}
|
||||
|
||||
type fixture struct {
|
||||
store *objectstore.StoreFixture
|
||||
resolver *mock_idresolver.MockResolver
|
||||
spaceService *mock_space.MockService
|
||||
updater *testUpdater
|
||||
*watcher
|
||||
}
|
||||
|
||||
func newFixture(t *testing.T, aggregationInterval time.Duration) *fixture {
|
||||
updater := &testUpdater{}
|
||||
store := objectstore.NewStoreFixture(t)
|
||||
resolver := mock_idresolver.NewMockResolver(t)
|
||||
spaceSvc := mock_space.NewMockService(t)
|
||||
|
||||
w := &watcher{
|
||||
updater: updater,
|
||||
store: store,
|
||||
resolver: resolver,
|
||||
spaceService: spaceSvc,
|
||||
|
||||
aggregationInterval: aggregationInterval,
|
||||
infoBatch: mb.New(0),
|
||||
accumulatedBacklinks: make(map[string]*backLinksUpdate),
|
||||
}
|
||||
|
||||
return &fixture{
|
||||
store: store,
|
||||
resolver: resolver,
|
||||
spaceService: spaceSvc,
|
||||
updater: updater,
|
||||
watcher: w,
|
||||
}
|
||||
}
|
||||
|
||||
func TestWatcher_Run(t *testing.T) {
|
||||
t.Run("backlinks update asynchronously", func(t *testing.T) {
|
||||
// given
|
||||
interval := 500 * time.Millisecond
|
||||
f := newFixture(t, interval)
|
||||
|
||||
f.resolver.EXPECT().ResolveSpaceID(mock.Anything).Return(spaceId, nil)
|
||||
|
||||
f.updater.runFunc = func(callback func(info spaceindex.LinksUpdateInfo)) {
|
||||
callback(spaceindex.LinksUpdateInfo{
|
||||
LinksFromId: "obj1",
|
||||
Added: []string{"obj2", "obj3"},
|
||||
Removed: nil,
|
||||
})
|
||||
time.Sleep(interval / 2)
|
||||
callback(spaceindex.LinksUpdateInfo{
|
||||
LinksFromId: "obj1",
|
||||
Added: []string{"obj4", "obj5"},
|
||||
Removed: []string{"obj2"},
|
||||
})
|
||||
time.Sleep(interval / 2)
|
||||
callback(spaceindex.LinksUpdateInfo{
|
||||
LinksFromId: "obj1",
|
||||
Added: []string{"obj6"},
|
||||
Removed: []string{"obj5"},
|
||||
})
|
||||
}
|
||||
|
||||
spc := mock_clientspace.NewMockSpace(t)
|
||||
f.spaceService.EXPECT().Get(mock.Anything, spaceId).Return(spc, nil)
|
||||
|
||||
spc.EXPECT().DoLockedIfNotExists(mock.Anything, mock.Anything).RunAndReturn(func(id string, apply func() error) error {
|
||||
if id == "obj2" {
|
||||
return ocache.ErrExists
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
spc.EXPECT().Do(mock.Anything, mock.Anything).Return(nil)
|
||||
|
||||
// when
|
||||
err := f.watcher.Run(nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
f.updater.start()
|
||||
|
||||
time.Sleep(4 * interval)
|
||||
err = f.watcher.Close(nil)
|
||||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestWatcher_updateAccumulatedBacklinks(t *testing.T) {
|
||||
t.Run("no errors", func(t *testing.T) {
|
||||
// given
|
||||
f := newFixture(t, time.Second)
|
||||
f.resolver.EXPECT().ResolveSpaceID(mock.Anything).Return(spaceId, nil)
|
||||
|
||||
f.store.AddObjects(t, spaceId, []spaceindex.TestObject{{
|
||||
bundle.RelationKeyId: pbtypes.String("obj1"),
|
||||
bundle.RelationKeySpaceId: pbtypes.String(spaceId),
|
||||
bundle.RelationKeyBacklinks: pbtypes.StringList([]string{"obj4", "obj5", "obj6"}),
|
||||
}, {
|
||||
bundle.RelationKeyId: pbtypes.String("obj3"),
|
||||
bundle.RelationKeySpaceId: pbtypes.String(spaceId),
|
||||
bundle.RelationKeyBacklinks: pbtypes.StringList([]string{"obj1", "obj2", "obj4"}),
|
||||
}})
|
||||
|
||||
spc := mock_clientspace.NewMockSpace(t)
|
||||
f.spaceService.EXPECT().Get(mock.Anything, spaceId).Return(spc, nil)
|
||||
|
||||
spc.EXPECT().DoLockedIfNotExists(mock.Anything, mock.Anything).RunAndReturn(func(id string, apply func() error) error {
|
||||
if id == "obj2" {
|
||||
return ocache.ErrExists
|
||||
}
|
||||
return apply()
|
||||
})
|
||||
|
||||
spc.EXPECT().Do(mock.Anything, mock.Anything).Return(nil).Once()
|
||||
|
||||
f.watcher.accumulatedBacklinks = map[string]*backLinksUpdate{
|
||||
"obj1": {
|
||||
added: []string{"obj2", "obj3"},
|
||||
removed: []string{"obj4", "obj5"},
|
||||
},
|
||||
"obj2": {
|
||||
added: []string{"obj4", "obj5"},
|
||||
},
|
||||
"obj3": {
|
||||
removed: []string{"obj1", "obj4"},
|
||||
},
|
||||
}
|
||||
|
||||
// when
|
||||
f.watcher.updateAccumulatedBacklinks()
|
||||
|
||||
// then
|
||||
details, err := f.store.SpaceIndex(spaceId).GetDetails("obj1")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, []string{"obj6", "obj2", "obj3"}, pbtypes.GetStringList(details.Details, bundle.RelationKeyBacklinks.String()))
|
||||
details, err = f.store.SpaceIndex(spaceId).GetDetails("obj3")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, []string{"obj2"}, pbtypes.GetStringList(details.Details, bundle.RelationKeyBacklinks.String()))
|
||||
})
|
||||
}
|
|
@ -421,6 +421,6 @@ func (s *service) loadImage(spaceId string, title, url string) (hash string, err
|
|||
if title != "" {
|
||||
fileName = title
|
||||
}
|
||||
res := uploader.SetName(fileName).SetFile(tmpFile.Name()).Upload(ctx)
|
||||
res := uploader.SetName(fileName).SetFile(tmpFile.Name()).SetImageKind(model.ImageKind_AutomaticallyAdded).Upload(ctx)
|
||||
return res.FileObjectId, res.Err
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@ const CName = "bookmark-importer"
|
|||
var log = logging.Logger("bookmark-importer")
|
||||
|
||||
type Importer interface {
|
||||
ImportWeb(ctx context.Context, req *pb.RpcObjectImportRequest) (string, *types.Struct, error)
|
||||
ImportWeb(ctx context.Context, req *importer.ImportRequest) (string, *types.Struct, error)
|
||||
}
|
||||
|
||||
type BookmarkImporterDecorator struct {
|
||||
|
@ -40,9 +40,11 @@ func (bd *BookmarkImporterDecorator) Init(a *app.App) (err error) {
|
|||
|
||||
func (bd *BookmarkImporterDecorator) CreateBookmarkObject(ctx context.Context, spaceID string, details *types.Struct, getContent bookmarksvc.ContentFuture) (objectId string, newDetails *types.Struct, err error) {
|
||||
url := pbtypes.GetString(details, bundle.RelationKeySource.String())
|
||||
if objectId, newDetails, err = bd.Importer.ImportWeb(nil, &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: url}},
|
||||
UpdateExistingObjects: true,
|
||||
if objectId, newDetails, err = bd.Importer.ImportWeb(nil, &importer.ImportRequest{
|
||||
RpcObjectImportRequest: &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: url}},
|
||||
UpdateExistingObjects: true,
|
||||
},
|
||||
}); err != nil {
|
||||
log.With(
|
||||
"function", "BookmarkFetch",
|
||||
|
|
|
@ -7,6 +7,7 @@ import (
|
|||
"github.com/gogo/protobuf/types"
|
||||
"github.com/samber/lo"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/backlinks"
|
||||
"github.com/anyproto/anytype-heart/core/block/cache"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/basic"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock"
|
||||
|
@ -28,10 +29,11 @@ import (
|
|||
var log = logging.Logger("collection-service")
|
||||
|
||||
type Service struct {
|
||||
lock *sync.RWMutex
|
||||
collections map[string]map[string]chan []string
|
||||
picker cache.ObjectGetter
|
||||
objectStore objectstore.ObjectStore
|
||||
lock *sync.RWMutex
|
||||
collections map[string]map[string]chan []string
|
||||
picker cache.ObjectGetter
|
||||
objectStore objectstore.ObjectStore
|
||||
backlinksUpdater backlinks.UpdateWatcher
|
||||
}
|
||||
|
||||
func New() *Service {
|
||||
|
@ -44,6 +46,7 @@ func New() *Service {
|
|||
func (s *Service) Init(a *app.App) (err error) {
|
||||
s.picker = app.MustComponent[cache.ObjectGetter](a)
|
||||
s.objectStore = app.MustComponent[objectstore.ObjectStore](a)
|
||||
s.backlinksUpdater = app.MustComponent[backlinks.UpdateWatcher](a)
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -56,8 +59,9 @@ func (s *Service) CollectionType() string {
|
|||
}
|
||||
|
||||
func (s *Service) Add(ctx session.Context, req *pb.RpcObjectCollectionAddRequest) error {
|
||||
return s.updateCollection(ctx, req.ContextId, func(col []string) []string {
|
||||
toAdd := slice.Difference(req.ObjectIds, col)
|
||||
var toAdd []string
|
||||
err := s.updateCollection(ctx, req.ContextId, func(col []string) []string {
|
||||
toAdd = slice.Difference(req.ObjectIds, col)
|
||||
pos := slice.FindPos(col, req.AfterId)
|
||||
if pos >= 0 {
|
||||
col = slice.Insert(col, pos+1, toAdd...)
|
||||
|
@ -66,6 +70,16 @@ func (s *Service) Add(ctx session.Context, req *pb.RpcObjectCollectionAddRequest
|
|||
}
|
||||
return col
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// we update backlinks of objects added to collection synchronously to avoid object rerender after backlinks accumulation interval
|
||||
if len(toAdd) != 0 {
|
||||
s.backlinksUpdater.FlushUpdates()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
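
The comment above the FlushUpdates call gives the reasoning: without the synchronous flush, objects just added to a collection would only show their new backlinks after the watcher's aggregation interval. Sketched from a caller's point of view (the helper name is illustrative; backlinks.UpdateWatcher and FlushUpdates are taken from this change):

// Illustrative only: any component holding a backlinks.UpdateWatcher can force
// accumulated backlink changes to be applied right after a batched edit.
func applyAndFlush(w backlinks.UpdateWatcher, apply func() error) error {
	if err := apply(); err != nil {
		return err
	}
	w.FlushUpdates() // skip the aggregation interval so readers see fresh backlinks immediately
	return nil
}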
|
||||
|
||||
func (s *Service) Remove(ctx session.Context, req *pb.RpcObjectCollectionRemoveRequest) error {
|
||||
|
|
|
@ -2,6 +2,7 @@ package collection
|
|||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
|
@ -15,6 +16,7 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/block/editor/template"
|
||||
"github.com/anyproto/anytype-heart/core/block/simple/dataview"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
coresb "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
|
@ -22,21 +24,42 @@ import (
|
|||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
const collectionID = "collectionID"
|
||||
|
||||
type testPicker struct {
|
||||
sb smartblock.SmartBlock
|
||||
sbMap map[string]smartblock.SmartBlock
|
||||
}
|
||||
|
||||
func (t *testPicker) GetObject(ctx context.Context, id string) (sb smartblock.SmartBlock, err error) {
|
||||
return t.sb, nil
|
||||
func (t *testPicker) GetObject(ctx context.Context, id string) (smartblock.SmartBlock, error) {
|
||||
if t.sbMap == nil {
|
||||
return nil, fmt.Errorf("not found")
|
||||
}
|
||||
sb, found := t.sbMap[id]
|
||||
if !found {
|
||||
return nil, fmt.Errorf("not found")
|
||||
}
|
||||
return sb, nil
|
||||
}
|
||||
|
||||
func (t *testPicker) GetObjectByFullID(ctx context.Context, id domain.FullID) (sb smartblock.SmartBlock, err error) {
|
||||
return t.sb, nil
|
||||
return t.GetObject(ctx, id.ObjectID)
|
||||
}
|
||||
|
||||
func (t *testPicker) Init(a *app.App) error { return nil }
|
||||
|
||||
func (t *testPicker) Name() string { return "" }
|
||||
func (t *testPicker) Name() string { return "test.picker" }
|
||||
|
||||
type testFlusher struct{}
|
||||
|
||||
func (tf *testFlusher) Name() string { return "test.flusher" }
|
||||
|
||||
func (tf *testFlusher) Init(*app.App) error { return nil }
|
||||
|
||||
func (tf *testFlusher) Run(context.Context) error { return nil }
|
||||
|
||||
func (tf *testFlusher) Close(context.Context) error { return nil }
|
||||
|
||||
func (tf *testFlusher) FlushUpdates() {}
|
||||
|
||||
type fixture struct {
|
||||
picker *testPicker
|
||||
|
@ -45,12 +68,14 @@ type fixture struct {
|
|||
}
|
||||
|
||||
func newFixture(t *testing.T) *fixture {
|
||||
picker := &testPicker{}
|
||||
a := &app.App{}
|
||||
picker := &testPicker{}
|
||||
flusher := &testFlusher{}
|
||||
objectStore := objectstore.NewStoreFixture(t)
|
||||
|
||||
a.Register(picker)
|
||||
a.Register(objectStore)
|
||||
a.Register(flusher)
|
||||
s := New()
|
||||
|
||||
err := s.Init(a)
|
||||
|
@ -59,11 +84,12 @@ func newFixture(t *testing.T) *fixture {
|
|||
}
|
||||
|
||||
func TestBroadcast(t *testing.T) {
|
||||
const collectionID = "collectionID"
|
||||
sb := smarttest.New(collectionID)
|
||||
|
||||
s := newFixture(t)
|
||||
s.picker.sb = sb
|
||||
s.picker.sbMap = map[string]smartblock.SmartBlock{
|
||||
collectionID: sb,
|
||||
}
|
||||
|
||||
_, subCh1, err := s.SubscribeForCollection(collectionID, "sub1")
|
||||
require.NoError(t, err)
|
||||
|
@ -193,3 +219,29 @@ func TestSetObjectTypeToViews(t *testing.T) {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestService_Add(t *testing.T) {
|
||||
t.Run("add new objects to collection", func(t *testing.T) {
|
||||
// given
|
||||
coll := smarttest.New(collectionID)
|
||||
obj1 := smarttest.New("obj1")
|
||||
obj2 := smarttest.New("obj2")
|
||||
|
||||
s := newFixture(t)
|
||||
s.picker.sbMap = map[string]smartblock.SmartBlock{
|
||||
collectionID: coll,
|
||||
"obj1": obj1,
|
||||
"obj2": obj2,
|
||||
}
|
||||
|
||||
// when
|
||||
err := s.Add(nil, &pb.RpcObjectCollectionAddRequest{
|
||||
ContextId: collectionID,
|
||||
ObjectIds: []string{"obj1", "obj2"},
|
||||
})
|
||||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []string{"obj1", "obj2"}, coll.NewState().GetStoreSlice(template.CollectionStoreKey))
|
||||
})
|
||||
}
|
||||
|
|
|
@@ -132,7 +132,7 @@ func (s *Service) OnDelete(id domain.FullID, workspaceRemove func() error) error
 		b.ObjectCloseAllSessions()
 		st := b.NewState()
 		isFavorite := pbtypes.GetBool(st.LocalDetails(), bundle.RelationKeyIsFavorite.String())
-		if err := s.detailsService.SetIsFavorite(id.ObjectID, isFavorite); err != nil {
+		if err := s.detailsService.SetIsFavorite(id.ObjectID, isFavorite, false); err != nil {
 			log.With("objectId", id).Errorf("failed to favorite object: %v", err)
 		}
 		b.SetIsDeleted()

@ -80,42 +80,33 @@ func (_c *MockService_Init_Call) RunAndReturn(run func(*app.App) error) *MockSer
|
|||
}
|
||||
|
||||
// ListRelationsWithValue provides a mock function with given fields: spaceId, value
|
||||
func (_m *MockService) ListRelationsWithValue(spaceId string, value *types.Value) ([]string, []int64, error) {
|
||||
func (_m *MockService) ListRelationsWithValue(spaceId string, value *types.Value) ([]*pb.RpcRelationListWithValueResponseResponseItem, error) {
|
||||
ret := _m.Called(spaceId, value)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for ListRelationsWithValue")
|
||||
}
|
||||
|
||||
var r0 []string
|
||||
var r1 []int64
|
||||
var r2 error
|
||||
if rf, ok := ret.Get(0).(func(string, *types.Value) ([]string, []int64, error)); ok {
|
||||
var r0 []*pb.RpcRelationListWithValueResponseResponseItem
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(string, *types.Value) ([]*pb.RpcRelationListWithValueResponseResponseItem, error)); ok {
|
||||
return rf(spaceId, value)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(string, *types.Value) []string); ok {
|
||||
if rf, ok := ret.Get(0).(func(string, *types.Value) []*pb.RpcRelationListWithValueResponseResponseItem); ok {
|
||||
r0 = rf(spaceId, value)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]string)
|
||||
r0 = ret.Get(0).([]*pb.RpcRelationListWithValueResponseResponseItem)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(string, *types.Value) []int64); ok {
|
||||
if rf, ok := ret.Get(1).(func(string, *types.Value) error); ok {
|
||||
r1 = rf(spaceId, value)
|
||||
} else {
|
||||
if ret.Get(1) != nil {
|
||||
r1 = ret.Get(1).([]int64)
|
||||
}
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(2).(func(string, *types.Value) error); ok {
|
||||
r2 = rf(spaceId, value)
|
||||
} else {
|
||||
r2 = ret.Error(2)
|
||||
}
|
||||
|
||||
return r0, r1, r2
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// MockService_ListRelationsWithValue_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListRelationsWithValue'
|
||||
|
@ -137,12 +128,12 @@ func (_c *MockService_ListRelationsWithValue_Call) Run(run func(spaceId string,
|
|||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockService_ListRelationsWithValue_Call) Return(keys []string, counters []int64, err error) *MockService_ListRelationsWithValue_Call {
|
||||
_c.Call.Return(keys, counters, err)
|
||||
func (_c *MockService_ListRelationsWithValue_Call) Return(_a0 []*pb.RpcRelationListWithValueResponseResponseItem, _a1 error) *MockService_ListRelationsWithValue_Call {
|
||||
_c.Call.Return(_a0, _a1)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockService_ListRelationsWithValue_Call) RunAndReturn(run func(string, *types.Value) ([]string, []int64, error)) *MockService_ListRelationsWithValue_Call {
|
||||
func (_c *MockService_ListRelationsWithValue_Call) RunAndReturn(run func(string, *types.Value) ([]*pb.RpcRelationListWithValueResponseResponseItem, error)) *MockService_ListRelationsWithValue_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
@ -572,17 +563,17 @@ func (_c *MockService_SetIsArchived_Call) RunAndReturn(run func(string, bool) er
|
|||
return _c
|
||||
}
|
||||
|
||||
// SetIsFavorite provides a mock function with given fields: objectId, isFavorite
|
||||
func (_m *MockService) SetIsFavorite(objectId string, isFavorite bool) error {
|
||||
ret := _m.Called(objectId, isFavorite)
|
||||
// SetIsFavorite provides a mock function with given fields: objectId, isFavorite, createWidget
|
||||
func (_m *MockService) SetIsFavorite(objectId string, isFavorite bool, createWidget bool) error {
|
||||
ret := _m.Called(objectId, isFavorite, createWidget)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for SetIsFavorite")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(string, bool) error); ok {
|
||||
r0 = rf(objectId, isFavorite)
|
||||
if rf, ok := ret.Get(0).(func(string, bool, bool) error); ok {
|
||||
r0 = rf(objectId, isFavorite, createWidget)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
@ -598,13 +589,14 @@ type MockService_SetIsFavorite_Call struct {
|
|||
// SetIsFavorite is a helper method to define mock.On call
|
||||
// - objectId string
|
||||
// - isFavorite bool
|
||||
func (_e *MockService_Expecter) SetIsFavorite(objectId interface{}, isFavorite interface{}) *MockService_SetIsFavorite_Call {
|
||||
return &MockService_SetIsFavorite_Call{Call: _e.mock.On("SetIsFavorite", objectId, isFavorite)}
|
||||
// - createWidget bool
|
||||
func (_e *MockService_Expecter) SetIsFavorite(objectId interface{}, isFavorite interface{}, createWidget interface{}) *MockService_SetIsFavorite_Call {
|
||||
return &MockService_SetIsFavorite_Call{Call: _e.mock.On("SetIsFavorite", objectId, isFavorite, createWidget)}
|
||||
}
|
||||
|
||||
func (_c *MockService_SetIsFavorite_Call) Run(run func(objectId string, isFavorite bool)) *MockService_SetIsFavorite_Call {
|
||||
func (_c *MockService_SetIsFavorite_Call) Run(run func(objectId string, isFavorite bool, createWidget bool)) *MockService_SetIsFavorite_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(string), args[1].(bool))
|
||||
run(args[0].(string), args[1].(bool), args[2].(bool))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
@ -614,7 +606,7 @@ func (_c *MockService_SetIsFavorite_Call) Return(_a0 error) *MockService_SetIsFa
|
|||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockService_SetIsFavorite_Call) RunAndReturn(run func(string, bool) error) *MockService_SetIsFavorite_Call {
|
||||
func (_c *MockService_SetIsFavorite_Call) RunAndReturn(run func(string, bool, bool) error) *MockService_SetIsFavorite_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
|
|
@ -14,11 +14,12 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/block/cache"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
coresb "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/database"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
|
||||
"github.com/anyproto/anytype-heart/space/spacecore/typeprovider"
|
||||
"github.com/anyproto/anytype-heart/util/dateutil"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
"github.com/anyproto/anytype-heart/util/slice"
|
||||
)
|
||||
|
@ -65,11 +66,11 @@ func (s *service) ObjectTypeRemoveRelations(ctx context.Context, objectTypeId st
|
|||
})
|
||||
}
|
||||
|
||||
func (s *service) ListRelationsWithValue(spaceId string, value *types.Value) (keys []string, counters []int64, err error) {
|
||||
func (s *service) ListRelationsWithValue(spaceId string, value *types.Value) ([]*pb.RpcRelationListWithValueResponseResponseItem, error) {
|
||||
countersByKeys := make(map[string]int64)
|
||||
detailHandlesValue := generateFilter(value)
|
||||
|
||||
err = s.store.SpaceIndex(spaceId).QueryIterate(database.Query{Filters: nil}, func(details *types.Struct) {
|
||||
err := s.store.SpaceIndex(spaceId).QueryIterate(database.Query{Filters: nil}, func(details *types.Struct) {
|
||||
for key, valueToCheck := range details.Fields {
|
||||
if detailHandlesValue(valueToCheck) {
|
||||
if counter, ok := countersByKeys[key]; ok {
|
||||
|
@ -82,27 +83,38 @@ func (s *service) ListRelationsWithValue(spaceId string, value *types.Value) (ke
|
|||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to query objects: %w", err)
|
||||
return nil, fmt.Errorf("failed to query objects: %w", err)
|
||||
}
|
||||
|
||||
keys = maps.Keys(countersByKeys)
|
||||
keys := maps.Keys(countersByKeys)
|
||||
slices.Sort(keys)
|
||||
list := make([]*pb.RpcRelationListWithValueResponseResponseItem, len(keys))
|
||||
|
||||
for _, key := range keys {
|
||||
counters = append(counters, countersByKeys[key])
|
||||
for i, key := range keys {
|
||||
list[i] = &pb.RpcRelationListWithValueResponseResponseItem{
|
||||
RelationKey: key,
|
||||
Counter: countersByKeys[key],
|
||||
}
|
||||
}
|
||||
|
||||
return keys, counters, nil
|
||||
return list, nil
|
||||
}
|
||||
|
||||
func generateFilter(value *types.Value) func(v *types.Value) bool {
|
||||
equalFilter := func(v *types.Value) bool {
|
||||
equalOrHasFilter := func(v *types.Value) bool {
|
||||
if list := v.GetListValue(); list != nil {
|
||||
for _, element := range list.Values {
|
||||
if element.Equal(value) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return v.Equal(value)
|
||||
}
|
||||
|
||||
stringValue := value.GetStringValue()
|
||||
if stringValue == "" {
|
||||
return equalFilter
|
||||
return equalOrHasFilter
|
||||
}
|
||||
|
||||
sbt, err := typeprovider.SmartblockTypeFromID(stringValue)
|
||||
|
@@ -111,31 +123,42 @@ func generateFilter(value *types.Value) func(v *types.Value) bool {
 	}
 
 	if sbt != coresb.SmartBlockTypeDate {
-		return equalFilter
+		return equalOrHasFilter
 	}
 
-	start, err := dateIDToDayStart(stringValue)
+	ts, err := dateutil.ParseDateId(stringValue)
 	if err != nil {
-		log.Error("failed to convert date id to day start", zap.Error(err))
-		return equalFilter
+		log.Error("failed to parse Date object id", zap.Error(err))
+		return equalOrHasFilter
 	}
 
+	shortId := dateutil.TimeToShortDateId(ts)
+
+	start := ts.Truncate(24 * time.Hour)
 	end := start.Add(24 * time.Hour)
 	startTimestamp := start.Unix()
 	endTimestamp := end.Unix()
 
+	// filter for date objects is able to find relations with values between the borders of queried day
+	// - for relations with number format it checks timestamp value is between timestamps of this day midnights
+	// - for relations carrying string list it checks if some of the strings has day prefix, e.g.
+	// if _date_2023-12-12-08-30-50 is queried, then all relations with prefix _date_2023-12-12 will be returned
 	return func(v *types.Value) bool {
 		numberValue := int64(v.GetNumberValue())
 		if numberValue >= startTimestamp && numberValue < endTimestamp {
 			return true
 		}
-		return equalFilter(v)
-	}
-}
-
-func dateIDToDayStart(id string) (time.Time, error) {
-	if !strings.HasPrefix(id, addr.DatePrefix) {
-		return time.Time{}, fmt.Errorf("invalid id: date prefix not found")
+		if list := v.GetListValue(); list != nil {
+			for _, element := range list.Values {
+				if element.Equal(value) {
+					return true
+				}
+				if strings.HasPrefix(element.GetStringValue(), shortId) {
+					return true
+				}
+			}
+		}
+		return v.Equal(value)
 	}
-	return time.Parse("2006-01-02", strings.TrimPrefix(id, addr.DatePrefix))
 }

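To make the day-range matching above concrete: a queried Date object id such as _date_2023-12-12-08-30-50 is reduced to the day 2023-12-12, and a relation value matches either when its numeric timestamp falls inside that day or when a string element carries the day prefix. The sketch below illustrates that behaviour with plain time/strings helpers; the helper names and the id prefix format are taken from the comment in the hunk, not from the real dateutil package, and the sketch assumes UTC timestamps:

package main

import (
	"fmt"
	"strings"
	"time"
)

// dayRangeMatcher is a simplified stand-in for the filter built in generateFilter above:
// it reports whether a numeric timestamp falls inside the queried day, or whether a
// string id carries that day's prefix.
func dayRangeMatcher(day time.Time) (matchTimestamp func(int64) bool, matchID func(string) bool) {
	start := day.Truncate(24 * time.Hour) // midnight UTC of the queried day
	end := start.Add(24 * time.Hour)
	shortID := "_date_" + start.Format("2006-01-02") // assumed day-prefix format

	matchTimestamp = func(ts int64) bool {
		return ts >= start.Unix() && ts < end.Unix()
	}
	matchID = func(id string) bool {
		return strings.HasPrefix(id, shortID)
	}
	return
}

func main() {
	day := time.Date(2023, 12, 12, 8, 30, 50, 0, time.UTC)
	matchTS, matchID := dayRangeMatcher(day)

	fmt.Println(matchTS(time.Date(2023, 12, 12, 23, 59, 0, 0, time.UTC).Unix())) // true: same day
	fmt.Println(matchTS(time.Date(2023, 12, 13, 0, 1, 0, 0, time.UTC).Unix()))   // false: next day
	fmt.Println(matchID("_date_2023-12-12-08-30-50"))                            // true: day prefix matches
	fmt.Println(matchID("_date_2023-12-11-10-00-00"))                            // false: different day
}
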
@ -13,10 +13,11 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock/smarttest"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
"github.com/anyproto/anytype-heart/util/dateutil"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
|
@ -39,6 +40,7 @@ func TestService_ListRelationsWithValue(t *testing.T) {
|
|||
relationObject(bundle.RelationKeyAddedDate, model.RelationFormat_date),
|
||||
relationObject(bundle.RelationKeyCreatedDate, model.RelationFormat_date),
|
||||
relationObject(bundle.RelationKeyLinks, model.RelationFormat_object),
|
||||
relationObject(bundle.RelationKeyMentions, model.RelationFormat_object),
|
||||
relationObject(bundle.RelationKeyName, model.RelationFormat_longtext),
|
||||
relationObject(bundle.RelationKeyIsHidden, model.RelationFormat_checkbox),
|
||||
relationObject(bundle.RelationKeyIsFavorite, model.RelationFormat_checkbox),
|
||||
|
@ -52,12 +54,12 @@ func TestService_ListRelationsWithValue(t *testing.T) {
|
|||
bundle.RelationKeyLastModifiedDate: pbtypes.Int64(now.Add(-1 * time.Minute).Unix()),
|
||||
bundle.RelationKeyIsFavorite: pbtypes.Bool(true),
|
||||
"daysTillSummer": pbtypes.Int64(300),
|
||||
bundle.RelationKeyLinks: pbtypes.StringList([]string{"obj2", "obj3"}),
|
||||
bundle.RelationKeyLinks: pbtypes.StringList([]string{"obj2", "obj3", dateutil.TimeToDateId(now.Add(-30 * time.Minute))}),
|
||||
},
|
||||
{
|
||||
bundle.RelationKeyId: pbtypes.String("obj2"),
|
||||
bundle.RelationKeySpaceId: pbtypes.String(spaceId),
|
||||
bundle.RelationKeyName: pbtypes.String(addr.TimeToID(now)),
|
||||
bundle.RelationKeyName: pbtypes.String(dateutil.TimeToDateId(now)),
|
||||
bundle.RelationKeyCreatedDate: pbtypes.Int64(now.Add(-24*time.Hour - 5*time.Minute).Unix()),
|
||||
bundle.RelationKeyAddedDate: pbtypes.Int64(now.Add(-24*time.Hour - 3*time.Minute).Unix()),
|
||||
bundle.RelationKeyLastModifiedDate: pbtypes.Int64(now.Add(-1 * time.Minute).Unix()),
|
||||
|
@ -71,53 +73,66 @@ func TestService_ListRelationsWithValue(t *testing.T) {
|
|||
bundle.RelationKeyLastModifiedDate: pbtypes.Int64(now.Unix()),
|
||||
bundle.RelationKeyIsFavorite: pbtypes.Bool(true),
|
||||
bundle.RelationKeyCoverX: pbtypes.Int64(300),
|
||||
bundle.RelationKeyMentions: pbtypes.StringList([]string{dateutil.TimeToDateId(now), dateutil.TimeToDateId(now.Add(-24 * time.Hour))}),
|
||||
},
|
||||
})
|
||||
|
||||
bs := service{store: store}
|
||||
|
||||
for _, tc := range []struct {
|
||||
name string
|
||||
value *types.Value
|
||||
expectedKeys []string
|
||||
expectedCounters []int64
|
||||
name string
|
||||
value *types.Value
|
||||
expectedList []*pb.RpcRelationListWithValueResponseResponseItem
|
||||
}{
|
||||
{
|
||||
"date object - today",
|
||||
pbtypes.String(addr.TimeToID(now)),
|
||||
[]string{bundle.RelationKeyAddedDate.String(), bundle.RelationKeyCreatedDate.String(), bundle.RelationKeyLastModifiedDate.String(), bundle.RelationKeyName.String()},
|
||||
[]int64{1, 2, 3, 1},
|
||||
pbtypes.String(dateutil.TimeToDateId(now)),
|
||||
[]*pb.RpcRelationListWithValueResponseResponseItem{
|
||||
{bundle.RelationKeyAddedDate.String(), 1},
|
||||
{bundle.RelationKeyCreatedDate.String(), 2},
|
||||
{bundle.RelationKeyLastModifiedDate.String(), 3},
|
||||
{bundle.RelationKeyLinks.String(), 1},
|
||||
{bundle.RelationKeyMentions.String(), 1},
|
||||
{bundle.RelationKeyName.String(), 1},
|
||||
},
|
||||
},
|
||||
{
|
||||
"date object - yesterday",
|
||||
pbtypes.String(addr.TimeToID(now.Add(-24 * time.Hour))),
|
||||
[]string{bundle.RelationKeyAddedDate.String(), bundle.RelationKeyCreatedDate.String()},
|
||||
[]int64{1, 1},
|
||||
pbtypes.String(dateutil.TimeToDateId(now.Add(-24 * time.Hour))),
|
||||
[]*pb.RpcRelationListWithValueResponseResponseItem{
|
||||
{bundle.RelationKeyAddedDate.String(), 1},
|
||||
{bundle.RelationKeyCreatedDate.String(), 1},
|
||||
{bundle.RelationKeyMentions.String(), 1},
|
||||
},
|
||||
},
|
||||
{
|
||||
"number",
|
||||
pbtypes.Int64(300),
|
||||
[]string{bundle.RelationKeyCoverX.String(), "daysTillSummer"},
|
||||
[]int64{2, 1},
|
||||
[]*pb.RpcRelationListWithValueResponseResponseItem{
|
||||
{bundle.RelationKeyCoverX.String(), 2},
|
||||
{"daysTillSummer", 1},
|
||||
},
|
||||
},
|
||||
{
|
||||
"bool",
|
||||
pbtypes.Bool(true),
|
||||
[]string{bundle.RelationKeyIsFavorite.String(), bundle.RelationKeyIsHidden.String()},
|
||||
[]int64{2, 1},
|
||||
[]*pb.RpcRelationListWithValueResponseResponseItem{
|
||||
{bundle.RelationKeyIsFavorite.String(), 2},
|
||||
{bundle.RelationKeyIsHidden.String(), 1},
|
||||
},
|
||||
},
|
||||
{
|
||||
"string list",
|
||||
pbtypes.StringList([]string{"obj2", "obj3"}),
|
||||
[]string{bundle.RelationKeyLinks.String()},
|
||||
[]int64{1},
|
||||
pbtypes.StringList([]string{"obj2", "obj3", dateutil.TimeToDateId(now.Add(-30 * time.Minute))}),
|
||||
[]*pb.RpcRelationListWithValueResponseResponseItem{
|
||||
{bundle.RelationKeyLinks.String(), 1},
|
||||
},
|
||||
},
|
||||
} {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
keys, counters, err := bs.ListRelationsWithValue(spaceId, tc.value)
|
||||
list, err := bs.ListRelationsWithValue(spaceId, tc.value)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tc.expectedKeys, keys)
|
||||
assert.Equal(t, tc.expectedCounters, counters)
|
||||
assert.Equal(t, tc.expectedList, list)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -41,12 +41,12 @@ type Service interface {
 	ObjectTypeAddRelations(ctx context.Context, objectTypeId string, relationKeys []domain.RelationKey) error
 	ObjectTypeRemoveRelations(ctx context.Context, objectTypeId string, relationKeys []domain.RelationKey) error
 
-	ListRelationsWithValue(spaceId string, value *types.Value) (keys []string, counters []int64, err error)
+	ListRelationsWithValue(spaceId string, value *types.Value) ([]*pb.RpcRelationListWithValueResponseResponseItem, error)
 
 	SetSpaceInfo(spaceId string, details *types.Struct) error
 	SetWorkspaceDashboardId(ctx session.Context, workspaceId string, id string) (setId string, err error)
 
-	SetIsFavorite(objectId string, isFavorite bool) error
+	SetIsFavorite(objectId string, isFavorite, createWidget bool) error
 	SetIsArchived(objectId string, isArchived bool) error
 	SetListIsFavorite(objectIds []string, isFavorite bool) error
 	SetListIsArchived(objectIds []string, isArchived bool) error

@ -325,19 +325,27 @@ func TestService_SetListIsFavorite(t *testing.T) {
|
|||
{bundle.RelationKeyId: pbtypes.String("obj2"), bundle.RelationKeySpaceId: pbtypes.String(spaceId)},
|
||||
{bundle.RelationKeyId: pbtypes.String("obj3"), bundle.RelationKeySpaceId: pbtypes.String(spaceId)},
|
||||
}
|
||||
homeId = "home"
|
||||
homeId = "home"
|
||||
widgetId = "widget"
|
||||
)
|
||||
|
||||
t.Run("no error on favoriting", func(t *testing.T) {
|
||||
// given
|
||||
fx := newFixture(t)
|
||||
sb := smarttest.New(homeId)
|
||||
sb.AddBlock(simple.New(&model.Block{Id: homeId, ChildrenIds: []string{}}))
|
||||
home := smarttest.New(homeId)
|
||||
home.AddBlock(simple.New(&model.Block{Id: homeId, ChildrenIds: []string{}}))
|
||||
widget := smarttest.New(widgetId)
|
||||
widget.AddBlock(simple.New(&model.Block{Id: widgetId, ChildrenIds: []string{}}))
|
||||
fx.store.AddObjects(t, spaceId, objects)
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId})
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId, Widgets: widgetId})
|
||||
fx.getter.EXPECT().GetObject(mock.Anything, mock.Anything).RunAndReturn(func(_ context.Context, objectId string) (smartblock.SmartBlock, error) {
|
||||
require.Equal(t, homeId, objectId)
|
||||
return editor.NewDashboard(sb, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
switch objectId {
|
||||
case homeId:
|
||||
return editor.NewDashboard(home, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
case widgetId:
|
||||
return editor.NewWidgetObject(widget, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
}
|
||||
return nil, fmt.Errorf("failed to get object")
|
||||
})
|
||||
|
||||
// when
|
||||
|
@ -345,22 +353,30 @@ func TestService_SetListIsFavorite(t *testing.T) {
|
|||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, sb.Blocks(), 4)
|
||||
assert.Len(t, home.Blocks(), 4)
|
||||
assert.Len(t, widget.Blocks(), 3)
|
||||
})
|
||||
|
||||
t.Run("no error on unfavoriting", func(t *testing.T) {
|
||||
// given
|
||||
fx := newFixture(t)
|
||||
sb := smarttest.New(homeId)
|
||||
sb.AddBlock(simple.New(&model.Block{Id: homeId, ChildrenIds: []string{"obj1", "obj2", "obj3"}}))
|
||||
sb.AddBlock(simple.New(&model.Block{Id: "obj1", Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{TargetBlockId: "obj1"}}}))
|
||||
sb.AddBlock(simple.New(&model.Block{Id: "obj2", Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{TargetBlockId: "obj2"}}}))
|
||||
sb.AddBlock(simple.New(&model.Block{Id: "obj3", Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{TargetBlockId: "obj3"}}}))
|
||||
home := smarttest.New(homeId)
|
||||
home.AddBlock(simple.New(&model.Block{Id: homeId, ChildrenIds: []string{"obj1", "obj2", "obj3"}}))
|
||||
home.AddBlock(simple.New(&model.Block{Id: "obj1", Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{TargetBlockId: "obj1"}}}))
|
||||
home.AddBlock(simple.New(&model.Block{Id: "obj2", Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{TargetBlockId: "obj2"}}}))
|
||||
home.AddBlock(simple.New(&model.Block{Id: "obj3", Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{TargetBlockId: "obj3"}}}))
|
||||
widget := smarttest.New(widgetId)
|
||||
widget.AddBlock(simple.New(&model.Block{Id: widgetId, ChildrenIds: []string{}}))
|
||||
fx.store.AddObjects(t, spaceId, objects)
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId})
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId, Widgets: widgetId})
|
||||
fx.getter.EXPECT().GetObject(mock.Anything, mock.Anything).RunAndReturn(func(_ context.Context, objectId string) (smartblock.SmartBlock, error) {
|
||||
require.Equal(t, homeId, objectId)
|
||||
return editor.NewDashboard(sb, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
switch objectId {
|
||||
case homeId:
|
||||
return editor.NewDashboard(home, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
case widgetId:
|
||||
return editor.NewWidgetObject(widget, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
}
|
||||
return nil, fmt.Errorf("failed to get object")
|
||||
})
|
||||
|
||||
// when
|
||||
|
@ -368,24 +384,32 @@ func TestService_SetListIsFavorite(t *testing.T) {
|
|||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, sb.Blocks(), 2)
|
||||
assert.Len(t, home.Blocks(), 2)
|
||||
assert.Len(t, widget.Blocks(), 1)
|
||||
})
|
||||
|
||||
t.Run("some updates failed", func(t *testing.T) {
|
||||
// given
|
||||
fx := newFixture(t)
|
||||
sb := smarttest.New(homeId)
|
||||
sb.AddBlock(simple.New(&model.Block{Id: homeId, ChildrenIds: []string{}}))
|
||||
home := smarttest.New(homeId)
|
||||
home.AddBlock(simple.New(&model.Block{Id: homeId, ChildrenIds: []string{}}))
|
||||
widget := smarttest.New(widgetId)
|
||||
widget.AddBlock(simple.New(&model.Block{Id: widgetId, ChildrenIds: []string{}}))
|
||||
fx.store.AddObjects(t, spaceId, objects)
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId})
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId, Widgets: widgetId})
|
||||
flag := false
|
||||
fx.getter.EXPECT().GetObject(mock.Anything, mock.Anything).RunAndReturn(func(_ context.Context, objectId string) (smartblock.SmartBlock, error) {
|
||||
require.Equal(t, homeId, objectId)
|
||||
if flag {
|
||||
return nil, fmt.Errorf("unexpected error")
|
||||
switch objectId {
|
||||
case homeId:
|
||||
if flag {
|
||||
return nil, fmt.Errorf("unexpected error")
|
||||
}
|
||||
flag = true
|
||||
return editor.NewDashboard(home, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
case widgetId:
|
||||
return editor.NewWidgetObject(widget, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
}
|
||||
flag = true
|
||||
return editor.NewDashboard(sb, fx.store.SpaceIndex(spaceId), nil), nil
|
||||
return nil, fmt.Errorf("failed to get object")
|
||||
})
|
||||
|
||||
// when
|
||||
|
@ -393,14 +417,15 @@ func TestService_SetListIsFavorite(t *testing.T) {
|
|||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, sb.Blocks(), 2)
|
||||
assert.Len(t, home.Blocks(), 2)
|
||||
assert.Len(t, widget.Blocks(), 3)
|
||||
})
|
||||
|
||||
t.Run("all updates failed", func(t *testing.T) {
|
||||
// given
|
||||
fx := newFixture(t)
|
||||
fx.store.AddObjects(t, spaceId, objects)
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId})
|
||||
fx.space.EXPECT().DerivedIDs().Return(threads.DerivedSmartblockIds{Home: homeId, Widgets: widgetId})
|
||||
fx.getter.EXPECT().GetObject(mock.Anything, mock.Anything).RunAndReturn(func(_ context.Context, objectId string) (smartblock.SmartBlock, error) {
|
||||
require.Equal(t, homeId, objectId)
|
||||
return nil, fmt.Errorf("unexpected error")
|
||||
|
|
|
@ -12,7 +12,11 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/block/editor"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/collection"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/widget"
|
||||
"github.com/anyproto/anytype-heart/core/block/simple"
|
||||
"github.com/anyproto/anytype-heart/core/session"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
coresb "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
|
@@ -57,7 +61,7 @@ func (s *service) SetWorkspaceDashboardId(ctx session.Context, workspaceId strin
 	return id, err
 }
 
-func (s *service) SetIsFavorite(objectId string, isFavorite bool) error {
+func (s *service) SetIsFavorite(objectId string, isFavorite, createWidget bool) error {
 	spaceID, err := s.resolver.ResolveSpaceID(objectId)
 	if err != nil {
 		return fmt.Errorf("resolve spaceID: %w", err)
@@ -66,7 +70,18 @@ func (s *service) SetIsFavorite(objectId string, isFavorite bool) error {
 	if err != nil {
 		return fmt.Errorf("get space: %w", err)
 	}
-	return s.objectLinksCollectionModify(spc.DerivedIDs().Home, objectId, isFavorite)
+	if err = s.objectLinksCollectionModify(spc.DerivedIDs().Home, objectId, isFavorite); err != nil {
+		return err
+	}
+
+	if createWidget && isFavorite {
+		err = s.createFavoriteWidget(spc.DerivedIDs().Widgets)
+		if err != nil {
+			log.Error("failed to create favorite widget", zap.Error(err))
+		}
+	}
+
+	return nil
 }
 
 func (s *service) SetIsArchived(objectId string, isArchived bool) error {
@@ -100,9 +115,9 @@ func (s *service) SetListIsFavorite(objectIds []string, isFavorite bool) error {
 		return err
 	}
 
-	for _, id := range ids {
+	for i, id := range ids {
 		// TODO Set list of ids at once
-		err := s.SetIsFavorite(id, isFavorite)
+		err := s.SetIsFavorite(id, isFavorite, i == 0)
 		if err != nil {
 			log.Error("failed to favorite object", zap.String("objectId", id), zap.Error(err))
 			resultError = errors.Join(resultError, err)
@@ -229,3 +244,42 @@ func (s *service) modifyArchiveLinks(
 	}
 	return
 }
+
+func (s *service) createFavoriteWidget(widgetObjectId string) error {
+	return cache.DoState(s.objectGetter, widgetObjectId, func(st *state.State, w widget.Widget) (err error) {
+		var favoriteBlockFound bool
+		err = st.Iterate(func(b simple.Block) (isContinue bool) {
+			link := b.Model().GetLink()
+			if link == nil {
+				return true
+			}
+			if link.TargetBlockId == widget.DefaultWidgetFavorite {
+				favoriteBlockFound = true
+				return false
+			}
+			return true
+		})
+
+		if err != nil {
+			return err
+		}
+
+		if favoriteBlockFound {
+			log.Debug("favorite widget block is already presented")
+			return nil
+		}
+
+		_, err = w.CreateBlock(st, &pb.RpcBlockCreateWidgetRequest{
+			ContextId:    widgetObjectId,
+			ObjectLimit:  6,
+			WidgetLayout: model.BlockContentWidget_List,
+			Position:     model.Block_Bottom,
+			Block: &model.Block{
+				Content: &model.BlockContentOfLink{Link: &model.BlockContentLink{
+					TargetBlockId: widget.DefaultWidgetFavorite,
+				}},
+			},
+		})
+		return err
+	})
+}

@ -26,7 +26,6 @@ import (
|
|||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
var ErrOptionUsedByOtherObjects = fmt.Errorf("option is used by other objects")
|
||||
|
@ -40,6 +39,7 @@ type FileUploadRequest struct {
|
|||
type UploadRequest struct {
|
||||
pb.RpcBlockUploadRequest
|
||||
ObjectOrigin objectorigin.ObjectOrigin
|
||||
ImageKind model.ImageKind
|
||||
}
|
||||
|
||||
type BookmarkFetchRequest struct {
|
||||
|
@ -47,17 +47,6 @@ type BookmarkFetchRequest struct {
|
|||
ObjectOrigin objectorigin.ObjectOrigin
|
||||
}
|
||||
|
||||
func (s *Service) MarkArchived(ctx session.Context, id string, archived bool) (err error) {
|
||||
return cache.Do(s, id, func(b basic.CommonOperations) error {
|
||||
return b.SetDetails(nil, []*model.Detail{
|
||||
{
|
||||
Key: "isArchived",
|
||||
Value: pbtypes.Bool(archived),
|
||||
},
|
||||
}, true)
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Service) CreateBlock(ctx session.Context, req pb.RpcBlockCreateRequest) (id string, err error) {
|
||||
err = cache.DoStateCtx(s, ctx, req.ContextId, func(st *state.State, b basic.Creatable) error {
|
||||
id, err = b.CreateBlock(st, req)
|
||||
|
@ -349,29 +338,21 @@ func (s *Service) FeaturedRelationRemove(ctx session.Context, contextId string,
|
|||
})
|
||||
}
|
||||
|
||||
func (s *Service) UploadBlockFile(ctx session.Context, req UploadRequest, groupID string) (err error) {
|
||||
return cache.Do(s, req.ContextId, func(b file.File) error {
|
||||
_, err = b.Upload(ctx, req.BlockId, file.FileSource{
|
||||
Path: req.FilePath,
|
||||
Url: req.Url,
|
||||
Bytes: req.Bytes,
|
||||
GroupID: groupID,
|
||||
Origin: req.ObjectOrigin,
|
||||
}, false)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Service) UploadBlockFileSync(ctx session.Context, req UploadRequest) (err error) {
|
||||
return cache.Do(s, req.ContextId, func(b file.File) error {
|
||||
_, err = b.Upload(ctx, req.BlockId, file.FileSource{
|
||||
Path: req.FilePath,
|
||||
Url: req.Url,
|
||||
Bytes: req.Bytes,
|
||||
Origin: req.ObjectOrigin,
|
||||
}, true)
|
||||
func (s *Service) UploadBlockFile(
|
||||
ctx session.Context, req UploadRequest, groupID string, isSync bool,
|
||||
) (fileObjectId string, err error) {
|
||||
err = cache.Do(s, req.ContextId, func(b file.File) error {
|
||||
fileObjectId, err = b.Upload(ctx, req.BlockId, file.FileSource{
|
||||
Path: req.FilePath,
|
||||
Url: req.Url,
|
||||
Bytes: req.Bytes,
|
||||
GroupID: groupID,
|
||||
Origin: req.ObjectOrigin,
|
||||
ImageKind: req.ImageKind,
|
||||
}, isSync)
|
||||
return err
|
||||
})
|
||||
return fileObjectId, err
|
||||
}
|
||||
|
||||
func (s *Service) CreateAndUploadFile(
|
||||
|
@ -403,6 +384,9 @@ func (s *Service) UploadFile(ctx context.Context, spaceId string, req FileUpload
|
|||
} else if req.Url != "" {
|
||||
upl.SetUrl(req.Url)
|
||||
}
|
||||
if req.ImageKind != model.ImageKind_Basic {
|
||||
upl.SetImageKind(req.ImageKind)
|
||||
}
|
||||
res := upl.Upload(ctx)
|
||||
if res.Err != nil {
|
||||
return "", nil, res.Err
|
||||
|
@ -430,25 +414,6 @@ func (s *Service) SetFileStyle(
|
|||
})
|
||||
}
|
||||
|
||||
func (s *Service) UploadFileBlock(
|
||||
contextID string, req UploadRequest,
|
||||
) (fileObjectId string, err error) {
|
||||
err = cache.Do(s, contextID, func(b file.File) error {
|
||||
fileObjectId, err = b.Upload(nil, req.BlockId, file.FileSource{
|
||||
Path: req.FilePath,
|
||||
Url: req.Url,
|
||||
Bytes: req.Bytes,
|
||||
GroupID: "",
|
||||
Origin: req.ObjectOrigin,
|
||||
}, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return fileObjectId, err
|
||||
}
|
||||
|
||||
func (s *Service) Undo(
|
||||
ctx session.Context, req pb.RpcObjectUndoRequest,
|
||||
) (info basic.HistoryInfo, err error) {
|
||||
|
|
|
@@ -62,13 +62,17 @@ func (p *Archive) CreationStateMigration(ctx *smartblock.InitContext) migration.
 				template.WithNoObjectTypes(),
 				template.WithDetailName("Archive"),
 				template.WithDetailIconEmoji("🗑"),
+				template.WithForcedDetail(bundle.RelationKeyIsHidden, pbtypes.Bool(true)),
 			)
 		},
 	}
 }
 
 func (p *Archive) StateMigrations() migration.Migrations {
-	return migration.MakeMigrations(nil)
+	return migration.MakeMigrations([]migration.Migration{{
+		Version: 2,
+		Proc:    template.WithForcedDetail(bundle.RelationKeyIsHidden, pbtypes.Bool(true)),
+	}})
 }
 
 func (p *Archive) Relations(_ *state.State) relationutils.Relations {

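The Archive (and, further below, the Dashboard) now hides itself both for newly created objects and, via a Version 2 state migration, for existing ones. A minimal sketch of the versioned-migration pattern this assumes — each migration carries a version and a proc, and only migrations above the object's stored version run; the types here are illustrative, not the real migration package:

package main

import "fmt"

type state struct {
	version int
	details map[string]bool
}

type migration struct {
	version int
	proc    func(*state)
}

// run applies, in order, every migration whose version is newer than the stored one.
func run(s *state, migrations []migration) {
	for _, m := range migrations {
		if m.version > s.version {
			m.proc(s)
			s.version = m.version
		}
	}
}

func main() {
	s := &state{version: 1, details: map[string]bool{}}
	run(s, []migration{{
		version: 2,
		proc:    func(s *state) { s.details["isHidden"] = true }, // analogue of WithForcedDetail(IsHidden, true)
	}})
	fmt.Println(s.version, s.details["isHidden"]) // 2 true
}
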
@@ -385,6 +385,10 @@ func (bs *basic) SetObjectTypesInState(s *state.State, objectTypeKeys []domain.T
 		if err = bs.Restrictions().Object.Check(model.Restrictions_TypeChange); errors.Is(err, restriction.ErrRestricted) {
 			return fmt.Errorf("objectType change is restricted for object '%s': %w", bs.Id(), err)
 		}
+
+		if objectTypeKeys[0] == bundle.TypeKeyTemplate {
+			return fmt.Errorf("changing object type to template is restricted")
+		}
 	}
 
 	s.SetObjectTypeKeys(objectTypeKeys)

@ -5,9 +5,13 @@ import (
|
|||
|
||||
"github.com/gogo/protobuf/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/converter"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/lastused/mock_lastused"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock/smarttest"
|
||||
"github.com/anyproto/anytype-heart/core/block/restriction"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore/spaceindex"
|
||||
|
@ -15,10 +19,11 @@ import (
|
|||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
type duFixture struct {
|
||||
sb *smarttest.SmartTest
|
||||
store *spaceindex.StoreFixture
|
||||
basic DetailsUpdatable
|
||||
type basicFixture struct {
|
||||
sb *smarttest.SmartTest
|
||||
store *spaceindex.StoreFixture
|
||||
lastUsed *mock_lastused.MockObjectUsageUpdater
|
||||
basic CommonOperations
|
||||
}
|
||||
|
||||
var (
|
||||
|
@ -26,26 +31,28 @@ var (
|
|||
spaceId = "space1"
|
||||
)
|
||||
|
||||
func newDUFixture(t *testing.T) *duFixture {
|
||||
func newBasicFixture(t *testing.T) *basicFixture {
|
||||
sb := smarttest.New(objectId)
|
||||
sb.SetDetails(nil, nil, false)
|
||||
sb.SetSpaceId(spaceId)
|
||||
|
||||
store := spaceindex.NewStoreFixture(t)
|
||||
lastUsed := mock_lastused.NewMockObjectUsageUpdater(t)
|
||||
|
||||
b := NewBasic(sb, store, converter.NewLayoutConverter(), nil, nil)
|
||||
b := NewBasic(sb, store, converter.NewLayoutConverter(), nil, lastUsed)
|
||||
|
||||
return &duFixture{
|
||||
sb: sb,
|
||||
store: store,
|
||||
basic: b,
|
||||
return &basicFixture{
|
||||
sb: sb,
|
||||
store: store,
|
||||
lastUsed: lastUsed,
|
||||
basic: b,
|
||||
}
|
||||
}
|
||||
|
||||
func TestBasic_UpdateDetails(t *testing.T) {
|
||||
t.Run("add new details", func(t *testing.T) {
|
||||
// given
|
||||
f := newDUFixture(t)
|
||||
f := newBasicFixture(t)
|
||||
f.store.AddObjects(t, []objectstore.TestObject{{
|
||||
bundle.RelationKeyId: pbtypes.String("rel-aperture"),
|
||||
bundle.RelationKeySpaceId: pbtypes.String(spaceId),
|
||||
|
@ -83,7 +90,7 @@ func TestBasic_UpdateDetails(t *testing.T) {
|
|||
|
||||
t.Run("modify details", func(t *testing.T) {
|
||||
// given
|
||||
f := newDUFixture(t)
|
||||
f := newBasicFixture(t)
|
||||
err := f.sb.SetDetails(nil, []*model.Detail{{
|
||||
Key: bundle.RelationKeySpaceDashboardId.String(),
|
||||
Value: pbtypes.String("123"),
|
||||
|
@ -114,7 +121,7 @@ func TestBasic_UpdateDetails(t *testing.T) {
|
|||
|
||||
t.Run("delete details", func(t *testing.T) {
|
||||
// given
|
||||
f := newDUFixture(t)
|
||||
f := newBasicFixture(t)
|
||||
err := f.sb.SetDetails(nil, []*model.Detail{{
|
||||
Key: bundle.RelationKeyTargetObjectType.String(),
|
||||
Value: pbtypes.String("ot-note"),
|
||||
|
@ -136,3 +143,52 @@ func TestBasic_UpdateDetails(t *testing.T) {
|
|||
assert.False(t, f.sb.HasRelation(f.sb.NewState(), bundle.RelationKeyTargetObjectType.String()))
|
||||
})
|
||||
}
|
||||
|
||||
func TestBasic_SetObjectTypesInState(t *testing.T) {
|
||||
t.Run("no error", func(t *testing.T) {
|
||||
// given
|
||||
f := newBasicFixture(t)
|
||||
|
||||
f.lastUsed.EXPECT().UpdateLastUsedDate(mock.Anything, bundle.TypeKeyTask, mock.Anything).Return().Once()
|
||||
f.store.AddObjects(t, []objectstore.TestObject{{
|
||||
bundle.RelationKeySpaceId: pbtypes.String(spaceId),
|
||||
bundle.RelationKeyId: pbtypes.String("ot-task"),
|
||||
bundle.RelationKeyUniqueKey: pbtypes.String("ot-task"),
|
||||
bundle.RelationKeyLayout: pbtypes.Int64(int64(model.ObjectType_todo)),
|
||||
}})
|
||||
|
||||
s := f.sb.NewState()
|
||||
|
||||
// when
|
||||
err := f.basic.SetObjectTypesInState(s, []domain.TypeKey{bundle.TypeKeyTask}, false)
|
||||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, bundle.TypeKeyTask, s.ObjectTypeKey())
|
||||
})
|
||||
|
||||
t.Run("type change is restricted", func(t *testing.T) {
|
||||
// given
|
||||
f := newBasicFixture(t)
|
||||
f.sb.TestRestrictions = restriction.Restrictions{Object: []model.RestrictionsObjectRestriction{model.Restrictions_TypeChange}}
|
||||
s := f.sb.NewState()
|
||||
|
||||
// when
|
||||
err := f.basic.SetObjectTypesInState(s, []domain.TypeKey{bundle.TypeKeyTask}, false)
|
||||
|
||||
// then
|
||||
assert.ErrorIs(t, err, restriction.ErrRestricted)
|
||||
})
|
||||
|
||||
t.Run("changing to template type is restricted", func(t *testing.T) {
|
||||
// given
|
||||
f := newBasicFixture(t)
|
||||
s := f.sb.NewState()
|
||||
|
||||
// when
|
||||
err := f.basic.SetObjectTypesInState(s, []domain.TypeKey{bundle.TypeKeyTemplate}, false)
|
||||
|
||||
// then
|
||||
assert.Error(t, err)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -61,9 +61,9 @@ func NewClipboard(sb smartblock.SmartBlock, file file.File, tempDirProvider core
|
|||
type clipboard struct {
|
||||
smartblock.SmartBlock
|
||||
file file.File
|
||||
tempDirProvider core.TempDirProvider
|
||||
objectStore spaceindex.Store
|
||||
fileService files.Service
|
||||
tempDirProvider core.TempDirProvider
|
||||
objectStore spaceindex.Store
|
||||
fileService files.Service
|
||||
fileObjectService fileobject.Service
|
||||
}
|
||||
|
||||
|
@ -524,11 +524,13 @@ func (cb *clipboard) pasteFiles(ctx session.Context, req *pb.RpcBlockPasteReques
|
|||
},
|
||||
})
|
||||
s.Add(b)
|
||||
|
||||
if err = cb.file.UploadState(ctx, s, b.Model().Id, file.FileSource{
|
||||
Bytes: fs.Data,
|
||||
Path: fs.LocalPath,
|
||||
Name: fs.Name,
|
||||
Origin: objectorigin.Clipboard(),
|
||||
Bytes: fs.Data,
|
||||
Path: fs.LocalPath,
|
||||
Name: fs.Name,
|
||||
Origin: objectorigin.Clipboard(),
|
||||
ImageKind: model.ImageKind_Basic,
|
||||
}, false); err != nil {
|
||||
return
|
||||
}
|
||||
|
|
|
@@ -60,13 +60,17 @@ func (p *Dashboard) CreationStateMigration(ctx *smartblock.InitContext) migratio
 				template.WithDetailName("Home"),
 				template.WithDetailIconEmoji("🏠"),
 				template.WithNoDuplicateLinks(),
+				template.WithForcedDetail(bundle.RelationKeyIsHidden, pbtypes.Bool(true)),
 			)
 		},
 	}
 }
 
 func (p *Dashboard) StateMigrations() migration.Migrations {
-	return migration.MakeMigrations(nil)
+	return migration.MakeMigrations([]migration.Migration{{
+		Version: 2,
+		Proc:    template.WithForcedDetail(bundle.RelationKeyIsHidden, pbtypes.Bool(true)),
+	}})
 }
 
 func (p *Dashboard) updateObjects(info smartblock.ApplyInfo) (err error) {

@ -16,6 +16,7 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/block/cache"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/template"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/core/block/simple"
|
||||
"github.com/anyproto/anytype-heart/core/block/simple/file"
|
||||
|
@ -62,12 +63,13 @@ type File interface {
|
|||
}
|
||||
|
||||
type FileSource struct {
|
||||
Path string
|
||||
Url string // nolint:revive
|
||||
Bytes []byte
|
||||
Name string
|
||||
GroupID string
|
||||
Origin objectorigin.ObjectOrigin
|
||||
Path string
|
||||
Url string // nolint:revive
|
||||
Bytes []byte
|
||||
Name string
|
||||
GroupID string
|
||||
Origin objectorigin.ObjectOrigin
|
||||
ImageKind model.ImageKind
|
||||
}
|
||||
|
||||
type sfile struct {
|
||||
|
@ -160,8 +162,9 @@ func (sf *sfile) CreateAndUpload(ctx session.Context, req pb.RpcBlockFileCreateA
|
|||
return
|
||||
}
|
||||
if err = sf.upload(s, newId, FileSource{
|
||||
Path: req.LocalPath,
|
||||
Url: req.Url,
|
||||
Path: req.LocalPath,
|
||||
Url: req.Url,
|
||||
ImageKind: req.ImageKind,
|
||||
}, false).Err; err != nil {
|
||||
return
|
||||
}
|
||||
|
@ -179,6 +182,9 @@ func (sf *sfile) upload(s *state.State, id string, source FileSource, isSync boo
|
|||
return fileuploader.UploadResult{Err: fmt.Errorf("not a file block")}
|
||||
}
|
||||
upl := sf.newUploader(source.Origin).SetBlock(f)
|
||||
if source.ImageKind != model.ImageKind_Basic {
|
||||
upl.SetImageKind(source.ImageKind)
|
||||
}
|
||||
if source.Path != "" {
|
||||
upl.SetFile(source.Path)
|
||||
} else if source.Url != "" {
|
||||
|
@@ -220,8 +226,10 @@ func (sf *sfile) updateFile(ctx session.Context, id, groupId string, apply func(
 }
 
 func (sf *sfile) DropFiles(req pb.RpcFileDropRequest) (err error) {
-	if err = sf.Restrictions().Object.Check(model.Restrictions_Blocks); err != nil {
-		return err
+	if !isCollection(sf) {
+		if err = sf.Restrictions().Object.Check(model.Restrictions_Blocks); err != nil {
+			return err
+		}
 	}
 	proc := &dropFilesProcess{
 		spaceID: sf.SpaceID(),
@@ -233,7 +241,7 @@ func (sf *sfile) DropFiles(req pb.RpcFileDropRequest) (err error) {
 		return
 	}
 	var ch = make(chan error)
-	go proc.Start(sf.RootId(), req.DropTargetId, req.Position, ch)
+	go proc.Start(sf, req.DropTargetId, req.Position, ch)
 	err = <-ch
 	return
 }
@ -247,7 +255,6 @@ func (sf *sfile) dropFilesCreateStructure(groupId, targetId string, pos model.Bl
|
|||
for _, entry := range entries {
|
||||
var blockId, pageId string
|
||||
if entry.isDir {
|
||||
|
||||
if err = sf.Apply(s); err != nil {
|
||||
return
|
||||
}
|
||||
|
@ -303,6 +310,13 @@ func (sf *sfile) dropFilesSetInfo(info dropFileInfo) (err error) {
|
|||
s.Unlink(info.blockId)
|
||||
return sf.Apply(s)
|
||||
}
|
||||
if isCollection(sf) {
|
||||
s := sf.NewState()
|
||||
if !s.HasInStore([]string{info.file.TargetObjectId}) {
|
||||
s.UpdateStoreSlice(template.CollectionStoreKey, append(s.GetStoreSlice(template.CollectionStoreKey), info.file.TargetObjectId))
|
||||
}
|
||||
return sf.Apply(s)
|
||||
}
|
||||
return sf.UpdateFile(info.blockId, info.groupId, func(f file.Block) error {
|
||||
if info.err != nil || info.file == nil || info.file.State == model.BlockContentFile_Error {
|
||||
if info.err != nil {
|
||||
|
@ -377,12 +391,12 @@ func (dp *dropFilesProcess) Info() pb.ModelProcess {
|
|||
}
|
||||
return pb.ModelProcess{
|
||||
Id: dp.id,
|
||||
Type: pb.ModelProcess_DropFiles,
|
||||
State: state,
|
||||
Progress: &pb.ModelProcessProgress{
|
||||
Total: atomic.LoadInt64(&dp.total),
|
||||
Done: atomic.LoadInt64(&dp.done),
|
||||
},
|
||||
Message: &pb.ModelProcessMessageOfDropFiles{DropFiles: &pb.ModelProcessDropFiles{}},
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -450,7 +464,7 @@ func (dp *dropFilesProcess) readdir(entry *dropFileEntry, allowSymlinks bool) (o
 	return true, nil
 }
 
-func (dp *dropFilesProcess) Start(rootId, targetId string, pos model.BlockPosition, rootDone chan error) {
+func (dp *dropFilesProcess) Start(file smartblock.SmartBlock, targetId string, pos model.BlockPosition, rootDone chan error) {
 	dp.id = uuid.New().String()
 	dp.doneCh = make(chan struct{})
 	dp.cancel = make(chan struct{})
@@ -469,6 +483,46 @@ func (dp *dropFilesProcess) Start(rootId, targetId string, pos model.BlockPositi
 		go dp.addFilesWorker(wg, in)
 	}
 
+	if isCollection(file) {
+		dp.handleDragAndDropInCollection(file.RootId(), dp.root.child, rootDone, in)
+	} else {
+		dp.handleDragAndDropInDocument(file.RootId(), targetId, pos, rootDone, in)
+	}
+	wg.Wait()
+}
+
+func (dp *dropFilesProcess) handleDragAndDropInCollection(rootId string, droppedFiles []*dropFileEntry, rootDone chan error, in chan *dropFileInfo) {
+	close(rootDone)
+	filesToUpload := dp.getFilesToUploadFromDirs(droppedFiles)
+	for _, entry := range filesToUpload {
+		in <- &dropFileInfo{
+			pageId: rootId,
+			path:   entry.path,
+			name:   entry.name,
+		}
+	}
+	close(in)
+}
+
+func (dp *dropFilesProcess) getFilesToUploadFromDirs(droppedFiles []*dropFileEntry) []*dropFileEntry {
+	var (
+		stack      []*dropFileEntry
+		totalFiles []*dropFileEntry
+	)
+	stack = append(stack, droppedFiles...)
+	for len(stack) > 0 {
+		entry := stack[len(stack)-1]
+		stack = stack[:len(stack)-1]
+		if entry.isDir {
+			stack = append(stack, entry.child...)
+		} else {
+			totalFiles = append(totalFiles, entry)
+		}
+	}
+	return totalFiles
+}
+
+func (dp *dropFilesProcess) handleDragAndDropInDocument(rootId, targetId string, pos model.BlockPosition, rootDone chan error, in chan *dropFileInfo) {
 	var flatEntries = [][]*dropFileEntry{dp.root.child}
 	var smartBlockIds = []string{rootId}
 	var handleLevel = func(idx int) (isContinue bool, err error) {
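When files are dropped into a collection, the hunk above flattens the dropped tree into a plain list of files before feeding them to the upload workers. A self-contained sketch of that stack-based flattening; the entry type below only mirrors the shape of dropFileEntry and is not the real struct:

package main

import "fmt"

// entry mirrors the fields of dropFileEntry that the flattening walk needs (illustrative only).
type entry struct {
	name  string
	isDir bool
	child []*entry
}

// flatten walks the dropped entries iteratively with an explicit stack and keeps only
// the files, mirroring getFilesToUploadFromDirs above.
func flatten(dropped []*entry) []*entry {
	var (
		stack []*entry
		files []*entry
	)
	stack = append(stack, dropped...)
	for len(stack) > 0 {
		e := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if e.isDir {
			stack = append(stack, e.child...)
		} else {
			files = append(files, e)
		}
	}
	return files
}

func main() {
	tree := []*entry{
		{name: "docs", isDir: true, child: []*entry{
			{name: "a.txt"},
			{name: "img", isDir: true, child: []*entry{{name: "b.png"}}},
		}},
		{name: "c.md"},
	}
	for _, f := range flatten(tree) {
		fmt.Println(f.name) // prints: c.md, b.png, a.txt (stack order, depth-first)
	}
}
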
@ -533,8 +587,6 @@ func (dp *dropFilesProcess) Start(rootId, targetId string, pos model.BlockPositi
|
|||
idx++
|
||||
}
|
||||
close(in)
|
||||
wg.Wait()
|
||||
return
|
||||
}
|
||||
|
||||
func (dp *dropFilesProcess) addFilesWorker(wg *sync.WaitGroup, in chan *dropFileInfo) {
|
||||
|
@ -590,3 +642,8 @@ func (dp *dropFilesProcess) apply(f *dropFileInfo) (err error) {
|
|||
return sbHandler.dropFilesSetInfo(*f)
|
||||
})
|
||||
}
|
||||
|
||||
func isCollection(smartBlock smartblock.SmartBlock) bool {
|
||||
layout, ok := smartBlock.Layout()
|
||||
return ok && layout == model.ObjectType_collection
|
||||
}
|
||||
|
|
|
@ -1,18 +1,42 @@
|
|||
package file
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/anyproto/any-sync/accountservice/mock_accountservice"
|
||||
"github.com/anyproto/any-sync/app"
|
||||
"github.com/anyproto/any-sync/commonfile/fileservice"
|
||||
"github.com/gogo/protobuf/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/anytype/config"
|
||||
"github.com/anyproto/anytype-heart/core/block/cache/mock_cache"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock/smarttest"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/template"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/core/block/restriction"
|
||||
"github.com/anyproto/anytype-heart/core/event/mock_event"
|
||||
"github.com/anyproto/anytype-heart/core/files"
|
||||
"github.com/anyproto/anytype-heart/core/files/fileobject/mock_fileobject"
|
||||
"github.com/anyproto/anytype-heart/core/files/fileuploader"
|
||||
"github.com/anyproto/anytype-heart/core/filestorage"
|
||||
"github.com/anyproto/anytype-heart/core/filestorage/filesync"
|
||||
"github.com/anyproto/anytype-heart/core/filestorage/rpcstore"
|
||||
wallet2 "github.com/anyproto/anytype-heart/core/wallet"
|
||||
"github.com/anyproto/anytype-heart/core/wallet/mock_wallet"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/core"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/datastore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/filestore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
"github.com/anyproto/anytype-heart/tests/blockbuilder"
|
||||
"github.com/anyproto/anytype-heart/tests/testutil"
|
||||
|
@ -21,21 +45,31 @@ import (
|
|||
|
||||
type fileFixture struct {
|
||||
sfile
|
||||
pickerFx *mock_cache.MockObjectGetter
|
||||
sb *smarttest.SmartTest
|
||||
pickerFx *mock_cache.MockObjectGetter
|
||||
sb *smarttest.SmartTest
|
||||
mockSender *mock_event.MockSender
|
||||
}
|
||||
|
||||
func newFixture(t *testing.T) *fileFixture {
|
||||
picker := mock_cache.NewMockObjectGetter(t)
|
||||
sb := smarttest.New("root")
|
||||
mockSender := mock_event.NewMockSender(t)
|
||||
fx := &fileFixture{
|
||||
pickerFx: picker,
|
||||
sb: sb,
|
||||
pickerFx: picker,
|
||||
sb: sb,
|
||||
mockSender: mockSender,
|
||||
}
|
||||
|
||||
a := &app.App{}
|
||||
a.Register(testutil.PrepareMock(context.Background(), a, mockSender))
|
||||
service := process.New()
|
||||
err := service.Init(a)
|
||||
assert.Nil(t, err)
|
||||
|
||||
fx.sfile = sfile{
|
||||
SmartBlock: sb,
|
||||
picker: picker,
|
||||
SmartBlock: sb,
|
||||
picker: picker,
|
||||
processService: service,
|
||||
}
|
||||
return fx
|
||||
}
|
||||
|
@ -119,4 +153,167 @@ func TestDropFiles(t *testing.T) {
|
|||
assert.Error(t, err)
|
||||
assert.True(t, errors.Is(err, restriction.ErrRestricted))
|
||||
})
|
||||
t.Run("drop files in collection - no restriction error", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
file, err := os.Create(filepath.Join(dir, "test"))
|
||||
assert.Nil(t, err)
|
||||
|
||||
fx := newFixture(t)
|
||||
st := fx.sb.Doc.NewState()
|
||||
st.SetDetail(bundle.RelationKeyLayout.String(), pbtypes.Int64(int64(model.ObjectType_collection)))
|
||||
fx.sb.Doc = st
|
||||
fx.pickerFx.EXPECT().GetObject(context.Background(), "root").Return(fx, nil).Maybe()
|
||||
fx.mockSender.EXPECT().Broadcast(mock.Anything).Return().Maybe()
|
||||
mockService := mock_fileobject.NewMockService(t)
|
||||
mockService.EXPECT().Create(mock.Anything, mock.Anything, mock.Anything).Return("fileObjectId", &types.Struct{Fields: map[string]*types.Value{}}, nil).Maybe()
|
||||
        fx.fileUploaderFactory = prepareFileService(t, fx.mockSender, mockService)

        // when
        err = fx.sfile.DropFiles(pb.RpcFileDropRequest{
            ContextId:      "root",
            LocalFilePaths: []string{file.Name()},
        })

        // then
        assert.Nil(t, err)
    })
    t.Run("drop dir in collection - no restriction error", func(t *testing.T) {
        // given
        dir := t.TempDir()
        _, err := os.Create(filepath.Join(dir, "test"))
        assert.Nil(t, err)

        fx := newFixture(t)
        st := fx.sb.Doc.NewState()
        st.SetDetail(bundle.RelationKeyLayout.String(), pbtypes.Int64(int64(model.ObjectType_collection)))
        fx.sb.Doc = st
        fx.pickerFx.EXPECT().GetObject(context.Background(), "root").Return(fx, nil).Maybe()
        fx.mockSender.EXPECT().Broadcast(mock.Anything).Return().Maybe()
        mockService := mock_fileobject.NewMockService(t)
        mockService.EXPECT().Create(mock.Anything, mock.Anything, mock.Anything).Return("fileObjectId", &types.Struct{Fields: map[string]*types.Value{}}, nil).Maybe()
        fx.fileUploaderFactory = prepareFileService(t, fx.mockSender, mockService)

        // when
        err = fx.sfile.DropFiles(pb.RpcFileDropRequest{
            ContextId:      "root",
            LocalFilePaths: []string{dir},
        })

        // then
        assert.Nil(t, err)
    })
    t.Run("drop files in collection - success", func(t *testing.T) {
        // given
        dir := t.TempDir()
        file, err := os.Create(filepath.Join(dir, "test"))
        assert.Nil(t, err)

        fx := newFixture(t)
        st := fx.sb.Doc.NewState()
        st.SetDetail(bundle.RelationKeyLayout.String(), pbtypes.Int64(int64(model.ObjectType_collection)))
        fx.sb.Doc = st
        fx.pickerFx.EXPECT().GetObject(context.Background(), "root").Return(fx, nil)
        fx.mockSender.EXPECT().Broadcast(mock.Anything).Return()
        mockService := mock_fileobject.NewMockService(t)
        mockService.EXPECT().Create(context.Background(), "", mock.Anything).Return("fileObjectId", &types.Struct{Fields: map[string]*types.Value{}}, nil).Maybe()
        fx.fileUploaderFactory = prepareFileService(t, fx.mockSender, mockService)

        // when
        proc := &dropFilesProcess{
            spaceID:             fx.SpaceID(),
            processService:      fx.processService,
            picker:              fx.picker,
            fileUploaderFactory: fx.fileUploaderFactory,
        }
        err = proc.Init([]string{file.Name()})
        assert.Nil(t, err)
        var ch = make(chan error)
        proc.Start(fx, "", model.Block_Bottom, ch)
        err = <-ch

        // then
        assert.Nil(t, err)
        storeSlice := fx.NewState().GetStoreSlice(template.CollectionStoreKey)
        assert.Len(t, storeSlice, 1)
        assert.Equal(t, "fileObjectId", storeSlice[0])
    })
    t.Run("drop dir with file in collection - success", func(t *testing.T) {
        // given
        dir := t.TempDir()
        _, err := os.Create(filepath.Join(dir, "test"))
        assert.Nil(t, err)

        fx := newFixture(t)
        st := fx.sb.Doc.NewState()
        st.SetDetail(bundle.RelationKeyLayout.String(), pbtypes.Int64(int64(model.ObjectType_collection)))
        fx.sb.Doc = st
        fx.pickerFx.EXPECT().GetObject(context.Background(), "root").Return(fx, nil)
        fx.mockSender.EXPECT().Broadcast(mock.Anything).Return()
        mockService := mock_fileobject.NewMockService(t)
        mockService.EXPECT().Create(context.Background(), "", mock.Anything).Return("fileObjectId", &types.Struct{Fields: map[string]*types.Value{}}, nil).Maybe()
        fx.fileUploaderFactory = prepareFileService(t, fx.mockSender, mockService)

        // when
        proc := &dropFilesProcess{
            spaceID:             fx.SpaceID(),
            processService:      fx.processService,
            picker:              fx.picker,
            fileUploaderFactory: fx.fileUploaderFactory,
        }
        err = proc.Init([]string{dir})
        assert.Nil(t, err)
        var ch = make(chan error)
        proc.Start(fx, "", model.Block_Bottom, ch)
        err = <-ch

        // then
        assert.Nil(t, err)
        storeSlice := fx.NewState().GetStoreSlice(template.CollectionStoreKey)
        assert.Len(t, storeSlice, 1)
        assert.Equal(t, "fileObjectId", storeSlice[0])
    })
}

func prepareFileService(t *testing.T, sender *mock_event.MockSender, fileObjectService *mock_fileobject.MockService) fileuploader.Service {
    dataStoreProvider, err := datastore.NewInMemory()
    assert.Nil(t, err)

    blockStorage := filestorage.NewInMemory()

    rpcStore := rpcstore.NewInMemoryStore(1024)
    rpcStoreService := rpcstore.NewInMemoryService(rpcStore)
    commonFileService := fileservice.New()
    fileSyncService := filesync.New()
    objectStore := objectstore.NewStoreFixture(t)

    ctx := context.Background()
    ctrl := gomock.NewController(t)
    wallet := mock_wallet.NewMockWallet(t)
    wallet.EXPECT().Name().Return(wallet2.CName)
    wallet.EXPECT().RepoPath().Return("repo/path")

    a := new(app.App)
    a.Register(dataStoreProvider)
    a.Register(filestore.New())
    a.Register(commonFileService)
    a.Register(fileSyncService)
    a.Register(testutil.PrepareMock(ctx, a, sender))
    a.Register(blockStorage)
    a.Register(objectStore)
    a.Register(rpcStoreService)
    a.Register(testutil.PrepareMock(ctx, a, mock_accountservice.NewMockService(ctrl)))
    a.Register(testutil.PrepareMock(ctx, a, wallet))
    a.Register(testutil.PrepareMock(ctx, a, fileObjectService))
    a.Register(&config.Config{DisableFileConfig: true, NetworkMode: pb.RpcAccount_DefaultConfig, PeferYamuxTransport: true})
    a.Register(core.NewTempDirService())
    a.Register(testutil.PrepareMock(ctx, a, mock_cache.NewMockObjectGetterComponent(t)))
    a.Register(files.New())
    err = a.Start(ctx)
    assert.Nil(t, err)

    service := fileuploader.New()
    err = service.Init(a)
    assert.Nil(t, err)
    return service
}

@@ -55,6 +55,13 @@ func (p *participant) Init(ctx *smartblock.InitContext) (err error) {
    ctx.State.SetDetailAndBundledRelation(bundle.RelationKeyLayout, pbtypes.Int64(int64(model.ObjectType_participant)))
    ctx.State.SetDetailAndBundledRelation(bundle.RelationKeyLayoutAlign, pbtypes.Int64(int64(model.Block_AlignCenter)))

    records, err := p.objectStore.QueryByIds([]string{p.Id()})
    if err != nil {
        return err
    }
    if len(records) > 0 {
        ctx.State.SetDetails(records[0].Details)
    }
    template.InitTemplate(ctx.State,
        template.WithEmpty,
        template.WithTitle,

@@ -63,15 +70,6 @@ func (p *participant) Init(ctx *smartblock.InitContext) (err error) {
        template.WithAddedFeaturedRelation(bundle.RelationKeyType),
        template.WithAddedFeaturedRelation(bundle.RelationKeyBacklinks),
    )

    records, err := p.objectStore.QueryByIds([]string{p.Id()})
    if err != nil {
        return err
    }
    if len(records) > 0 {
        ctx.State.SetDetails(records[0].Details)
    }

    return nil
}

@@ -3,11 +3,13 @@ package editor
import (
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

    "github.com/anyproto/anytype-heart/core/block/editor/basic"
    "github.com/anyproto/anytype-heart/core/block/editor/smartblock"
    "github.com/anyproto/anytype-heart/core/block/editor/smartblock/smarttest"
    "github.com/anyproto/anytype-heart/core/block/editor/state"
    "github.com/anyproto/anytype-heart/core/block/migration"
    "github.com/anyproto/anytype-heart/core/domain"
    "github.com/anyproto/anytype-heart/pkg/lib/bundle"

@@ -111,6 +113,68 @@ func TestParticipant_ModifyIdentityDetails(t *testing.T) {
    }
}

func TestParticipant_Init(t *testing.T) {
    t.Run("title block not empty, because name detail is in store", func(t *testing.T) {
        // given
        sb := smarttest.New("root")
        store := newStoreFixture(t)
        store.AddObjects(t, []objectstore.TestObject{{
            bundle.RelationKeySpaceId: pbtypes.String("spaceId"),
            bundle.RelationKeyId:      pbtypes.String("root"),
            bundle.RelationKeyName:    pbtypes.String("test"),
        }})

        basicComponent := basic.NewBasic(sb, store, nil, nil, nil)
        p := &participant{
            SmartBlock:       sb,
            DetailsUpdatable: basicComponent,
            objectStore:      store,
        }

        initCtx := &smartblock.InitContext{
            IsNewObject: true,
        }

        // when
        err := p.Init(initCtx)
        assert.NoError(t, err)
        migration.RunMigrations(p, initCtx)
        err = p.Apply(initCtx.State)
        assert.NoError(t, err)

        // then
        assert.NotNil(t, p.NewState().Get(state.TitleBlockID))
        assert.Equal(t, "test", p.NewState().Get(state.TitleBlockID).Model().GetText().GetText())
    })
    t.Run("title block is empty", func(t *testing.T) {
        // given
        sb := smarttest.New("root")
        store := newStoreFixture(t)

        basicComponent := basic.NewBasic(sb, store, nil, nil, nil)
        p := &participant{
            SmartBlock:       sb,
            DetailsUpdatable: basicComponent,
            objectStore:      store,
        }

        initCtx := &smartblock.InitContext{
            IsNewObject: true,
        }

        // when
        err := p.Init(initCtx)
        assert.NoError(t, err)
        migration.RunMigrations(p, initCtx)
        err = p.Apply(initCtx.State)
        assert.NoError(t, err)

        // then
        assert.NotNil(t, p.NewState().Get(state.TitleBlockID))
        assert.Equal(t, "", p.NewState().Get(state.TitleBlockID).Model().GetText().GetText())
    })
}

func newStoreFixture(t *testing.T) *spaceindex.StoreFixture {
    store := spaceindex.NewStoreFixture(t)

@@ -46,6 +46,7 @@ import (
    "github.com/anyproto/anytype-heart/pkg/lib/threads"
    "github.com/anyproto/anytype-heart/space/spacecore/storage/sqlitestorage"
    "github.com/anyproto/anytype-heart/util/anonymize"
    "github.com/anyproto/anytype-heart/util/dateutil"
    "github.com/anyproto/anytype-heart/util/internalflag"
    "github.com/anyproto/anytype-heart/util/pbtypes"
    "github.com/anyproto/anytype-heart/util/slice"

@@ -461,10 +462,7 @@ func (sb *smartBlock) fetchMeta() (details []*model.ObjectViewDetailsSet, err er
    depIds := sb.dependentSmartIds(sb.includeRelationObjectsAsDependents, true, true)
    sb.setDependentIDs(depIds)

    perSpace, err := sb.partitionIdsBySpace(sb.depIds)
    if err != nil {
        return nil, fmt.Errorf("partiton by space: %w", err)
    }
    perSpace := sb.partitionIdsBySpace(sb.depIds)

    recordsCh := make(chan *types.Struct, 10)
    sb.recordsSub = database.NewSubscription(nil, recordsCh)

@@ -513,9 +511,14 @@ func (sb *smartBlock) fetchMeta() (details []*model.ObjectViewDetailsSet, err er
    return
}

func (sb *smartBlock) partitionIdsBySpace(ids []string) (map[string][]string, error) {
func (sb *smartBlock) partitionIdsBySpace(ids []string) map[string][]string {
    perSpace := map[string][]string{}
    for _, id := range ids {
        if _, parseErr := dateutil.ParseDateId(id); parseErr == nil {
            perSpace[sb.space.Id()] = append(perSpace[sb.space.Id()], id)
            continue
        }

        spaceId, err := sb.spaceIdResolver.ResolveSpaceID(id)
        if errors.Is(err, sqlitestorage.ErrObjectNotFound) || errors.Is(err, badger.ErrKeyNotFound) {
            perSpace[sb.space.Id()] = append(perSpace[sb.space.Id()], id)

@@ -529,7 +532,7 @@ func (sb *smartBlock) partitionIdsBySpace(ids []string) (map[string][]string, er
        }
        perSpace[spaceId] = append(perSpace[spaceId], id)
    }
    return perSpace, nil
    return perSpace
}

func (sb *smartBlock) Lock() {

@@ -861,10 +864,7 @@ func (sb *smartBlock) CheckSubscriptions() (changed bool) {
    }
    newIDs := sb.recordsSub.Subscribe(sb.depIds)

    perSpace, err := sb.partitionIdsBySpace(newIDs)
    if err != nil {
        log.Errorf("partiton by space error: %v", err)
    }
    perSpace := sb.partitionIdsBySpace(newIDs)

    for spaceId, ids := range perSpace {
        spaceIndex := sb.objectStore.SpaceIndex(spaceId)

@@ -253,6 +253,7 @@ var workspaceKeysToCopy = []string{
    bundle.RelationKeyIconOption.String(),
    bundle.RelationKeySpaceDashboardId.String(),
    bundle.RelationKeyCreatedDate.String(),
    bundle.RelationKeyChatId.String(),
}

func (s *SpaceView) GetSpaceDescription() (data spaceinfo.SpaceDescription) {

@@ -1330,8 +1330,9 @@ func (s *State) Copy() *State {
}

func (s *State) HasRelation(key string) bool {
    for _, rel := range s.relationLinks {
        if rel.Key == key {
    links := s.GetRelationLinks()
    for _, link := range links {
        if link.Key == key {
            return true
        }
    }

@@ -103,9 +103,9 @@ func (e *export) Name() (name string) {

func (e *export) Export(ctx context.Context, req pb.RpcObjectListExportRequest) (path string, succeed int, err error) {
    queue := e.processService.NewQueue(pb.ModelProcess{
        Id:    bson.NewObjectId().Hex(),
        Type:  pb.ModelProcess_Export,
        State: 0,
        Id:      bson.NewObjectId().Hex(),
        State:   0,
        Message: &pb.ModelProcessMessageOfExport{Export: &pb.ModelProcessExport{}},
    }, 4)
    queue.SetMessage("prepare")


@@ -26,7 +26,7 @@ func (s *Service) DownloadFile(ctx context.Context, req *pb.RpcFileDownloadReque
    if err != nil {
        return "", fmt.Errorf("mkdir -p: %w", anyerror.CleanupError(err))
    }
    progress := process.NewProgress(pb.ModelProcess_SaveFile)
    progress := process.NewProgress(&pb.ModelProcessMessageOfSaveFile{SaveFile: &pb.ModelProcessSaveFile{}})
    defer progress.Finish(nil)

    err = s.ProcessAdd(progress)
@ -7,15 +7,29 @@ import (
|
|||
|
||||
"github.com/anyproto/any-sync/commonspace/object/acl/list"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/source"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
)
|
||||
|
||||
var ErrCancel = fmt.Errorf("import is canceled")
|
||||
var ErrFailedToReceiveListOfObjects = fmt.Errorf("failed to receive the list of objects")
|
||||
var ErrNoObjectsToImport = fmt.Errorf("source path doesn't contain objects to import")
|
||||
var ErrLimitExceeded = fmt.Errorf("Limit of relations or objects are exceeded ")
|
||||
var ErrFileLoad = fmt.Errorf("file was not synced")
|
||||
var (
|
||||
ErrCancel = errors.New("import is canceled")
|
||||
ErrCsvLimitExceeded = errors.New("Limit of relations or objects are exceeded ")
|
||||
ErrFileLoad = errors.New("file was not synced")
|
||||
|
||||
ErrNoObjectInIntegration = errors.New("no objects added to Notion integration")
|
||||
ErrNotionServerIsUnavailable = errors.New("notion server is unavailable")
|
||||
ErrNotionServerExceedRateLimit = errors.New("rate limit exceeded")
|
||||
|
||||
ErrFileImportNoObjectsInZipArchive = errors.New("no objects in zip archive")
|
||||
ErrFileImportNoObjectsInDirectory = errors.New("no objects in directory")
|
||||
|
||||
ErrPbNotAnyBlockFormat = errors.New("file doesn't match Anyblock format ")
|
||||
|
||||
ErrWrongHTMLFormat = errors.New("html file has wrong structure")
|
||||
|
||||
ErrNoSnapshotToImport = errors.New("no snapshot to import") // for external import
|
||||
)
|
||||
|
||||
type ConvertError struct {
|
||||
errors []error
|
||||
|
@ -72,23 +86,15 @@ func (ce *ConvertError) GetResultError(importType model.ImportType) error {
|
|||
var countNoObjectsToImport int
|
||||
for _, e := range ce.errors {
|
||||
switch {
|
||||
case errors.Is(e, ErrCancel):
|
||||
return fmt.Errorf("import type: %s: %w", importType.String(), ErrCancel)
|
||||
case errors.Is(e, ErrLimitExceeded):
|
||||
return fmt.Errorf("import type: %s: %w", importType.String(), ErrLimitExceeded)
|
||||
case errors.Is(e, ErrFailedToReceiveListOfObjects):
|
||||
return ErrFailedToReceiveListOfObjects
|
||||
case errors.Is(e, ErrFileLoad):
|
||||
case isDefinedError(e):
|
||||
return fmt.Errorf("import type: %s: %w", importType.String(), e)
|
||||
case errors.Is(e, list.ErrInsufficientPermissions):
|
||||
return e
|
||||
case errors.Is(e, ErrNoObjectsToImport):
|
||||
case IsNoObjectError(e):
|
||||
countNoObjectsToImport++
|
||||
}
|
||||
}
|
||||
// we return ErrNoObjectsToImport only if all paths has such error, otherwise we assume that import finished with internal code error
|
||||
if countNoObjectsToImport == len(ce.errors) {
|
||||
return fmt.Errorf("import type: %s: %w", importType.String(), ErrNoObjectsToImport)
|
||||
return fmt.Errorf("import type: %s: %w", importType.String(), ce.errors[0])
|
||||
}
|
||||
return fmt.Errorf("import type: %s: %w", importType.String(), ce.Error())
|
||||
}
|
||||
|
@ -99,7 +105,7 @@ func (ce *ConvertError) IsNoObjectToImportError(importPathsCount int) bool {
|
|||
}
|
||||
var countNoObjectsToImport int
|
||||
for _, err := range ce.errors {
|
||||
if errors.Is(err, ErrNoObjectsToImport) {
|
||||
if IsNoObjectError(err) {
|
||||
countNoObjectsToImport++
|
||||
}
|
||||
}
|
||||
|
@ -108,23 +114,37 @@ func (ce *ConvertError) IsNoObjectToImportError(importPathsCount int) bool {
|
|||
func (ce *ConvertError) ShouldAbortImport(pathsCount int, importType model.ImportType) bool {
|
||||
return !ce.IsEmpty() && ce.mode == pb.RpcObjectImportRequest_ALL_OR_NOTHING ||
|
||||
ce.IsNoObjectToImportError(pathsCount) ||
|
||||
errors.Is(ce.GetResultError(importType), ErrLimitExceeded) ||
|
||||
errors.Is(ce.GetResultError(importType), ErrCsvLimitExceeded) ||
|
||||
errors.Is(ce.GetResultError(importType), ErrCancel)
|
||||
}
|
||||
|
||||
func GetImportErrorCode(err error) model.ImportErrorCode {
|
||||
func GetImportNotificationErrorCode(err error) model.ImportErrorCode {
|
||||
if err == nil {
|
||||
return model.Import_NULL
|
||||
}
|
||||
switch {
|
||||
case errors.Is(err, ErrNoObjectsToImport):
|
||||
return model.Import_NO_OBJECTS_TO_IMPORT
|
||||
case errors.Is(err, ErrNoObjectInIntegration) ||
|
||||
errors.Is(err, ErrFileImportNoObjectsInDirectory) ||
|
||||
errors.Is(err, ErrFileImportNoObjectsInZipArchive): // support existing protocol
|
||||
return model.Import_NOTION_NO_OBJECTS_IN_INTEGRATION
|
||||
case errors.Is(err, ErrNotionServerIsUnavailable):
|
||||
return model.Import_NOTION_SERVER_IS_UNAVAILABLE
|
||||
case errors.Is(err, ErrNotionServerExceedRateLimit):
|
||||
return model.Import_NOTION_RATE_LIMIT_EXCEEDED
|
||||
case errors.Is(err, ErrFileImportNoObjectsInDirectory):
|
||||
return model.Import_FILE_IMPORT_NO_OBJECTS_IN_DIRECTORY
|
||||
case errors.Is(err, ErrFileImportNoObjectsInZipArchive):
|
||||
return model.Import_FILE_IMPORT_NO_OBJECTS_IN_ZIP_ARCHIVE
|
||||
case errors.Is(err, ErrPbNotAnyBlockFormat):
|
||||
return model.Import_PB_NOT_ANYBLOCK_FORMAT
|
||||
case errors.Is(err, ErrCancel):
|
||||
return model.Import_IMPORT_IS_CANCELED
|
||||
case errors.Is(err, ErrLimitExceeded):
|
||||
return model.Import_LIMIT_OF_ROWS_OR_RELATIONS_EXCEEDED
|
||||
case errors.Is(err, ErrCsvLimitExceeded):
|
||||
return model.Import_CSV_LIMIT_OF_ROWS_OR_RELATIONS_EXCEEDED
|
||||
case errors.Is(err, ErrFileLoad):
|
||||
return model.Import_FILE_LOAD_ERROR
|
||||
case errors.Is(err, ErrWrongHTMLFormat):
|
||||
return model.Import_HTML_WRONG_HTML_STRUCTURE
|
||||
case errors.Is(err, list.ErrInsufficientPermissions):
|
||||
return model.Import_INSUFFICIENT_PERMISSIONS
|
||||
default:
|
||||
|
@ -132,6 +152,28 @@ func GetImportErrorCode(err error) model.ImportErrorCode {
|
|||
}
|
||||
}
|
||||
|
||||
func ErrorBySourceType(s source.Source) error {
|
||||
if _, ok := s.(*source.Directory); ok {
|
||||
return ErrFileImportNoObjectsInDirectory
|
||||
}
|
||||
if _, ok := s.(*source.Zip); ok {
|
||||
return ErrFileImportNoObjectsInZipArchive
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func IsNoObjectError(err error) bool {
|
||||
return errors.Is(err, ErrNoObjectInIntegration) ||
|
||||
errors.Is(err, ErrFileImportNoObjectsInDirectory) ||
|
||||
errors.Is(err, ErrFileImportNoObjectsInZipArchive)
|
||||
}
|
||||
|
||||
func isDefinedError(err error) bool {
|
||||
return errors.Is(err, ErrCancel) || errors.Is(err, ErrCsvLimitExceeded) || errors.Is(err, ErrNotionServerExceedRateLimit) ||
|
||||
errors.Is(err, ErrNotionServerIsUnavailable) || errors.Is(err, ErrFileLoad) || errors.Is(err, ErrPbNotAnyBlockFormat) ||
|
||||
errors.Is(err, ErrWrongHTMLFormat)
|
||||
}
|
||||
|
||||
func GetGalleryResponseCode(err error) pb.RpcObjectImportExperienceResponseErrorCode {
|
||||
if err == nil {
|
||||
return pb.RpcObjectImportExperienceResponseError_NULL
|
||||
|
|

core/block/import/common/error_test.go (new file, 351 lines)
@@ -0,0 +1,351 @@
|
|||
package common
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/anyproto/any-sync/commonspace/object/acl/list"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/source"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
)
|
||||
|
||||
func TestConvertError(t *testing.T) {
|
||||
t.Run("new error from existing", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrCancel
|
||||
|
||||
// when
|
||||
ce := NewFromError(err, pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
|
||||
// then
|
||||
assert.Len(t, ce.errors, 1)
|
||||
assert.Equal(t, err, ce.errors[0])
|
||||
})
|
||||
t.Run("add", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
err := fmt.Errorf("another error")
|
||||
|
||||
// when
|
||||
ce.Add(err)
|
||||
|
||||
// then
|
||||
assert.Len(t, ce.errors, 1)
|
||||
assert.Equal(t, err, ce.errors[0])
|
||||
})
|
||||
t.Run("merge", func(t *testing.T) {
|
||||
// given
|
||||
ce1 := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
ce1.Add(fmt.Errorf("error1"))
|
||||
|
||||
ce2 := NewError(pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
ce2.Add(fmt.Errorf("error2"))
|
||||
|
||||
// when
|
||||
ce1.Merge(ce2)
|
||||
|
||||
// then
|
||||
assert.Len(t, ce1.errors, 2)
|
||||
assert.Equal(t, "error1", ce1.errors[0].Error())
|
||||
assert.Equal(t, "error2", ce1.errors[1].Error())
|
||||
})
|
||||
t.Run("ShouldAbortImport", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
|
||||
// when
|
||||
ce.Add(ErrCancel)
|
||||
|
||||
// then
|
||||
assert.True(t, ce.ShouldAbortImport(1, model.Import_Notion))
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetImportNotificationErrorCode(t *testing.T) {
|
||||
t.Run("GetImportNotificationErrorCode - NOTION_NO_OBJECTS_IN_INTEGRATION", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrNoObjectInIntegration
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_NOTION_NO_OBJECTS_IN_INTEGRATION, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - IMPORT_IS_CANCELED", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrCancel
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_IMPORT_IS_CANCELED, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - INTERNAL_ERROR", func(t *testing.T) {
|
||||
// given
|
||||
err := errors.New("some random error")
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_INTERNAL_ERROR, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - nil", func(t *testing.T) {
|
||||
// given
|
||||
var err error
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_NULL, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - notion server is unavailable", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrNotionServerIsUnavailable
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_NOTION_SERVER_IS_UNAVAILABLE, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - notion server exceeded limit", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrNotionServerExceedRateLimit
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_NOTION_RATE_LIMIT_EXCEEDED, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - no objects in dir", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrFileImportNoObjectsInDirectory
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_NOTION_NO_OBJECTS_IN_INTEGRATION, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - no objects in zip", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrFileImportNoObjectsInZipArchive
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_NOTION_NO_OBJECTS_IN_INTEGRATION, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - not any block format", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrPbNotAnyBlockFormat
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_PB_NOT_ANYBLOCK_FORMAT, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - csv limit exceeded", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrCsvLimitExceeded
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_CSV_LIMIT_OF_ROWS_OR_RELATIONS_EXCEEDED, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - wrong html", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrWrongHTMLFormat
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_HTML_WRONG_HTML_STRUCTURE, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - insufficient permissions", func(t *testing.T) {
|
||||
// given
|
||||
err := list.ErrInsufficientPermissions
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_INSUFFICIENT_PERMISSIONS, code)
|
||||
})
|
||||
t.Run("GetImportNotificationErrorCode - file load error", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrFileLoad
|
||||
|
||||
// when
|
||||
code := GetImportNotificationErrorCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, model.Import_FILE_LOAD_ERROR, code)
|
||||
})
|
||||
}
|
||||
|
||||
func TestError(t *testing.T) {
|
||||
t.Run("error is not empty", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
err1 := fmt.Errorf("error 1")
|
||||
err2 := fmt.Errorf("error 2")
|
||||
ce.Add(err1)
|
||||
ce.Add(err2)
|
||||
|
||||
// when
|
||||
actual := ce.Error().Error()
|
||||
|
||||
// then
|
||||
expected := "error: error 1\nerror: error 2\n"
|
||||
assert.Equal(t, expected, actual)
|
||||
})
|
||||
t.Run("error is empty", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
|
||||
// when
|
||||
actual := ce.Error()
|
||||
|
||||
// then
|
||||
assert.Nil(t, actual)
|
||||
})
|
||||
}
|
||||
|
||||
func TestConvertError_GetResultError(t *testing.T) {
|
||||
t.Run("get result error", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
|
||||
// when
|
||||
ce.Add(ErrCancel)
|
||||
|
||||
// then
|
||||
result := ce.GetResultError(model.Import_Notion)
|
||||
assert.ErrorIs(t, result, ErrCancel)
|
||||
assert.EqualError(t, result, "import type: Notion: import is canceled")
|
||||
})
|
||||
t.Run("get result error - error is empty", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
|
||||
// when
|
||||
result := ce.GetResultError(model.Import_Notion)
|
||||
|
||||
// then
|
||||
assert.Nil(t, result)
|
||||
})
|
||||
t.Run("get result error - no object error", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
|
||||
// when
|
||||
ce.Add(ErrFileImportNoObjectsInDirectory)
|
||||
ce.Add(ErrFileImportNoObjectsInZipArchive)
|
||||
result := ce.GetResultError(model.Import_Notion)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, result)
|
||||
})
|
||||
}
|
||||
|
||||
func TestIsNoObjectError(t *testing.T) {
|
||||
t.Run("IsNoObjectError - random error", func(t *testing.T) {
|
||||
// given
|
||||
err := errors.New("some random error")
|
||||
|
||||
// when
|
||||
result := IsNoObjectError(err)
|
||||
|
||||
// then
|
||||
assert.False(t, result)
|
||||
})
|
||||
t.Run("IsNoObjectToImportError", func(t *testing.T) {
|
||||
// given
|
||||
ce := NewError(pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
|
||||
// when
|
||||
ce.Add(ErrFileImportNoObjectsInDirectory)
|
||||
ce.Add(ErrFileImportNoObjectsInZipArchive)
|
||||
|
||||
// then
|
||||
assert.True(t, ce.IsNoObjectToImportError(2))
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetGalleryResponseCode(t *testing.T) {
|
||||
t.Run("GetGalleryResponseCode - no error", func(t *testing.T) {
|
||||
// given
|
||||
var err error
|
||||
|
||||
// when
|
||||
code := GetGalleryResponseCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, pb.RpcObjectImportExperienceResponseError_NULL, code)
|
||||
})
|
||||
t.Run("GetGalleryResponseCode - internal error", func(t *testing.T) {
|
||||
// given
|
||||
err := ErrCancel
|
||||
|
||||
// when
|
||||
code := GetGalleryResponseCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, pb.RpcObjectImportExperienceResponseError_UNKNOWN_ERROR, code)
|
||||
})
|
||||
t.Run("GetGalleryResponseCode - insufficient permission error", func(t *testing.T) {
|
||||
// given
|
||||
err := list.ErrInsufficientPermissions
|
||||
|
||||
// when
|
||||
code := GetGalleryResponseCode(err)
|
||||
|
||||
// then
|
||||
assert.Equal(t, pb.RpcObjectImportExperienceResponseError_INSUFFICIENT_PERMISSION, code)
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetNoObjectErrorBySourceType(t *testing.T) {
|
||||
t.Run("source is directory", func(t *testing.T) {
|
||||
// given
|
||||
dirSource := &source.Directory{}
|
||||
|
||||
// when
|
||||
err := ErrorBySourceType(dirSource)
|
||||
|
||||
// then
|
||||
assert.ErrorIs(t, err, ErrFileImportNoObjectsInDirectory)
|
||||
})
|
||||
t.Run("source is zip", func(t *testing.T) {
|
||||
// given
|
||||
zipSource := &source.Zip{}
|
||||
|
||||
// when
|
||||
err := ErrorBySourceType(zipSource)
|
||||
|
||||
// then
|
||||
assert.ErrorIs(t, err, ErrFileImportNoObjectsInZipArchive)
|
||||
})
|
||||
t.Run("source is nil", func(t *testing.T) {
|
||||
// when
|
||||
err := ErrorBySourceType(nil)
|
||||
|
||||
// then
|
||||
assert.Nil(t, err)
|
||||
})
|
||||
}
|
|

@@ -392,7 +392,7 @@ func (oc *ObjectCreator) resetState(newID string, st *state.State) *types.Struc
func (oc *ObjectCreator) setFavorite(snapshot *model.SmartBlockSnapshotBase, newID string) {
    isFavorite := pbtypes.GetBool(snapshot.Details, bundle.RelationKeyIsFavorite.String())
    if isFavorite {
        err := oc.detailsService.SetIsFavorite(newID, true)
        err := oc.detailsService.SetIsFavorite(newID, true, false)
        if err != nil {
            log.With(zap.String("object id", newID)).Errorf("failed to set isFavorite when importing object: %s", err)
        }

@@ -66,11 +66,12 @@ func (s *FileSyncer) Sync(id domain.FullID, newIdsSet map[string]struct{}, b sim
            BlockId: b.Model().Id,
        }
    }
    params.ContextId = id.ObjectID
    dto := block.UploadRequest{
        RpcBlockUploadRequest: params,
        ObjectOrigin:          origin,
    }
    _, err := s.service.UploadFileBlock(id.ObjectID, dto)
    _, err := s.service.UploadBlockFile(nil, dto, "", false)
    if os.IsNotExist(err) {
        return anyerror.CleanupError(err)
    }
@ -74,14 +74,15 @@ func TestFileSyncer_Sync(t *testing.T) {
|
|||
simpleBlock := simple.New(block)
|
||||
service := mock_syncer.NewMockBlockService(t)
|
||||
params := pb.RpcBlockUploadRequest{
|
||||
FilePath: "not exist",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
ContextId: id.ObjectID,
|
||||
FilePath: "not exist",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
}
|
||||
dto := block2.UploadRequest{
|
||||
RpcBlockUploadRequest: params,
|
||||
ObjectOrigin: objectorigin.Import(model.Import_Pb),
|
||||
}
|
||||
service.EXPECT().UploadFileBlock(id.ObjectID, dto).Return("", os.ErrNotExist)
|
||||
service.EXPECT().UploadBlockFile(nil, dto, "", false).Return("", os.ErrNotExist)
|
||||
syncer := NewFileSyncer(service, nil)
|
||||
|
||||
// when
|
||||
|
@ -104,14 +105,15 @@ func TestFileSyncer_Sync(t *testing.T) {
|
|||
simpleBlock := simple.New(block)
|
||||
service := mock_syncer.NewMockBlockService(t)
|
||||
params := pb.RpcBlockUploadRequest{
|
||||
FilePath: "not exist",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
ContextId: id.ObjectID,
|
||||
FilePath: "not exist",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
}
|
||||
dto := block2.UploadRequest{
|
||||
RpcBlockUploadRequest: params,
|
||||
ObjectOrigin: objectorigin.Import(model.Import_Pb),
|
||||
}
|
||||
service.EXPECT().UploadFileBlock(id.ObjectID, dto).Return("", fmt.Errorf("new error"))
|
||||
service.EXPECT().UploadBlockFile(nil, dto, "", false).Return("", fmt.Errorf("new error"))
|
||||
syncer := NewFileSyncer(service, nil)
|
||||
|
||||
// when
|
||||
|
@ -134,14 +136,15 @@ func TestFileSyncer_Sync(t *testing.T) {
|
|||
simpleBlock := simple.New(block)
|
||||
service := mock_syncer.NewMockBlockService(t)
|
||||
params := pb.RpcBlockUploadRequest{
|
||||
FilePath: "exist",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
ContextId: id.ObjectID,
|
||||
FilePath: "exist",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
}
|
||||
dto := block2.UploadRequest{
|
||||
RpcBlockUploadRequest: params,
|
||||
ObjectOrigin: objectorigin.Import(model.Import_Pb),
|
||||
}
|
||||
service.EXPECT().UploadFileBlock(id.ObjectID, dto).Return("fileId", nil)
|
||||
service.EXPECT().UploadBlockFile(nil, dto, "", false).Return("fileId", nil)
|
||||
syncer := NewFileSyncer(service, nil)
|
||||
|
||||
// when
|
||||
|
@ -163,14 +166,15 @@ func TestFileSyncer_Sync(t *testing.T) {
|
|||
simpleBlock := simple.New(block)
|
||||
service := mock_syncer.NewMockBlockService(t)
|
||||
params := pb.RpcBlockUploadRequest{
|
||||
Url: "http://example.com",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
ContextId: id.ObjectID,
|
||||
Url: "http://example.com",
|
||||
BlockId: simpleBlock.Model().GetId(),
|
||||
}
|
||||
dto := block2.UploadRequest{
|
||||
RpcBlockUploadRequest: params,
|
||||
ObjectOrigin: objectorigin.Import(model.Import_Pb),
|
||||
}
|
||||
service.EXPECT().UploadFileBlock(id.ObjectID, dto).Return("fileId", nil)
|
||||
service.EXPECT().UploadBlockFile(nil, dto, "", false).Return("fileId", nil)
|
||||
syncer := NewFileSyncer(service, nil)
|
||||
|
||||
// when
|
||||
|
|
|

@@ -20,6 +20,7 @@ import (
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
    "github.com/anyproto/anytype-heart/pkg/lib/logging"
    "github.com/anyproto/anytype-heart/pkg/lib/pb/model"
    "github.com/anyproto/anytype-heart/util/anyerror"
)

@@ -92,7 +93,7 @@ func (s *IconSyncer) handleIconImage(spaceId string, newIdsSet map[string]struct
        return fileObjectId, nil
    }

    req := pb.RpcFileUploadRequest{LocalPath: iconImage}
    req := pb.RpcFileUploadRequest{LocalPath: iconImage, ImageKind: model.ImageKind_Icon}
    if strings.HasPrefix(iconImage, "http://") || strings.HasPrefix(iconImage, "https://") {
        req = pb.RpcFileUploadRequest{Url: iconImage}
    }
@ -11,6 +11,8 @@ import (
|
|||
|
||||
mock "github.com/stretchr/testify/mock"
|
||||
|
||||
session "github.com/anyproto/anytype-heart/core/session"
|
||||
|
||||
smartblock "github.com/anyproto/anytype-heart/core/block/editor/smartblock"
|
||||
|
||||
types "github.com/gogo/protobuf/types"
|
||||
|
@ -147,6 +149,65 @@ func (_c *MockBlockService_GetObjectByFullID_Call) RunAndReturn(run func(context
|
|||
return _c
|
||||
}
|
||||
|
||||
// UploadBlockFile provides a mock function with given fields: ctx, req, groupID, isSync
|
||||
func (_m *MockBlockService) UploadBlockFile(ctx session.Context, req block.UploadRequest, groupID string, isSync bool) (string, error) {
|
||||
ret := _m.Called(ctx, req, groupID, isSync)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for UploadBlockFile")
|
||||
}
|
||||
|
||||
var r0 string
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(session.Context, block.UploadRequest, string, bool) (string, error)); ok {
|
||||
return rf(ctx, req, groupID, isSync)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(session.Context, block.UploadRequest, string, bool) string); ok {
|
||||
r0 = rf(ctx, req, groupID, isSync)
|
||||
} else {
|
||||
r0 = ret.Get(0).(string)
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(session.Context, block.UploadRequest, string, bool) error); ok {
|
||||
r1 = rf(ctx, req, groupID, isSync)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// MockBlockService_UploadBlockFile_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadBlockFile'
|
||||
type MockBlockService_UploadBlockFile_Call struct {
|
||||
*mock.Call
|
||||
}
|
||||
|
||||
// UploadBlockFile is a helper method to define mock.On call
|
||||
// - ctx session.Context
|
||||
// - req block.UploadRequest
|
||||
// - groupID string
|
||||
// - isSync bool
|
||||
func (_e *MockBlockService_Expecter) UploadBlockFile(ctx interface{}, req interface{}, groupID interface{}, isSync interface{}) *MockBlockService_UploadBlockFile_Call {
|
||||
return &MockBlockService_UploadBlockFile_Call{Call: _e.mock.On("UploadBlockFile", ctx, req, groupID, isSync)}
|
||||
}
|
||||
|
||||
func (_c *MockBlockService_UploadBlockFile_Call) Run(run func(ctx session.Context, req block.UploadRequest, groupID string, isSync bool)) *MockBlockService_UploadBlockFile_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(session.Context), args[1].(block.UploadRequest), args[2].(string), args[3].(bool))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBlockService_UploadBlockFile_Call) Return(fileObjectId string, err error) *MockBlockService_UploadBlockFile_Call {
|
||||
_c.Call.Return(fileObjectId, err)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBlockService_UploadBlockFile_Call) RunAndReturn(run func(session.Context, block.UploadRequest, string, bool) (string, error)) *MockBlockService_UploadBlockFile_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
||||
// UploadFile provides a mock function with given fields: ctx, spaceId, req
|
||||
func (_m *MockBlockService) UploadFile(ctx context.Context, spaceId string, req block.FileUploadRequest) (string, *types.Struct, error) {
|
||||
ret := _m.Called(ctx, spaceId, req)
|
||||
|
@ -214,63 +275,6 @@ func (_c *MockBlockService_UploadFile_Call) RunAndReturn(run func(context.Contex
|
|||
return _c
|
||||
}
|
||||
|
||||
// UploadFileBlock provides a mock function with given fields: contextID, req
|
||||
func (_m *MockBlockService) UploadFileBlock(contextID string, req block.UploadRequest) (string, error) {
|
||||
ret := _m.Called(contextID, req)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for UploadFileBlock")
|
||||
}
|
||||
|
||||
var r0 string
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(string, block.UploadRequest) (string, error)); ok {
|
||||
return rf(contextID, req)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(string, block.UploadRequest) string); ok {
|
||||
r0 = rf(contextID, req)
|
||||
} else {
|
||||
r0 = ret.Get(0).(string)
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(string, block.UploadRequest) error); ok {
|
||||
r1 = rf(contextID, req)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// MockBlockService_UploadFileBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadFileBlock'
|
||||
type MockBlockService_UploadFileBlock_Call struct {
|
||||
*mock.Call
|
||||
}
|
||||
|
||||
// UploadFileBlock is a helper method to define mock.On call
|
||||
// - contextID string
|
||||
// - req block.UploadRequest
|
||||
func (_e *MockBlockService_Expecter) UploadFileBlock(contextID interface{}, req interface{}) *MockBlockService_UploadFileBlock_Call {
|
||||
return &MockBlockService_UploadFileBlock_Call{Call: _e.mock.On("UploadFileBlock", contextID, req)}
|
||||
}
|
||||
|
||||
func (_c *MockBlockService_UploadFileBlock_Call) Run(run func(contextID string, req block.UploadRequest)) *MockBlockService_UploadFileBlock_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(string), args[1].(block.UploadRequest))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBlockService_UploadFileBlock_Call) Return(fileObjectId string, err error) *MockBlockService_UploadFileBlock_Call {
|
||||
_c.Call.Return(fileObjectId, err)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBlockService_UploadFileBlock_Call) RunAndReturn(run func(string, block.UploadRequest) (string, error)) *MockBlockService_UploadFileBlock_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
||||
// NewMockBlockService creates a new instance of MockBlockService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
|
||||
// The first argument is typically a *testing.T value.
|
||||
func NewMockBlockService(t interface {
|
||||
|
|
|

@@ -10,13 +10,14 @@ import (
    "github.com/anyproto/anytype-heart/core/block/simple"
    "github.com/anyproto/anytype-heart/core/domain"
    "github.com/anyproto/anytype-heart/core/domain/objectorigin"
    "github.com/anyproto/anytype-heart/core/session"
)

type BlockService interface {
    GetObject(ctx context.Context, objectID string) (sb smartblock.SmartBlock, err error)
    GetObjectByFullID(ctx context.Context, id domain.FullID) (sb smartblock.SmartBlock, err error)
    UploadFile(ctx context.Context, spaceId string, req block.FileUploadRequest) (objectId string, details *types.Struct, err error)
    UploadFileBlock(contextID string, req block.UploadRequest) (fileObjectId string, err error)
    UploadBlockFile(ctx session.Context, req block.UploadRequest, groupID string, isSync bool) (fileObjectId string, err error)
}

type Syncer interface {

core/block/import/common/test/utils.go (new file, 25 lines)
@@ -0,0 +1,25 @@
package test

import (
    "archive/zip"
    "fmt"
    "os"
    "testing"

    "github.com/stretchr/testify/assert"
)

func CreateEmptyZip(t *testing.T, zipFileName string) error {
    zipFile, err := os.Create(zipFileName)
    if err != nil {
        return fmt.Errorf("Failed to create zip file: %w\n", err)
    }
    defer zipFile.Close()

    zipWriter := zip.NewWriter(zipFile)
    defer func() {
        err = zipWriter.Close()
        assert.NoError(t, err)
    }()
    return nil
}

@@ -68,7 +68,7 @@ func (c *CollectionStrategy) CreateObjects(path string, csvTable [][]string, par
    snapshots = append(snapshots, relationsSnapshots...)
    progress.AddDone(1)
    if errRelationLimit != nil || errRowLimit != nil {
        return "", nil, common.ErrLimitExceeded
        return "", nil, common.ErrCsvLimitExceeded
    }
    return snapshot.Id, snapshots, nil
}

@@ -99,7 +99,7 @@ func getDetailsFromCSVTable(csvTable [][]string, useFirstRowForRelations bool) (
    var err error
    numberOfRelationsLimit := len(allRelations)
    if numberOfRelationsLimit > limitForColumns {
        err = common.ErrLimitExceeded
        err = common.ErrCsvLimitExceeded
        numberOfRelationsLimit = limitForColumns
    }
    allRelations = findUniqueRelationAndAddNumber(allRelations)

@@ -193,7 +193,7 @@ func getObjectsFromCSVRows(path string, csvTable [][]string, relations []*model.
    numberOfObjectsLimit := len(csvTable)
    var err error
    if numberOfObjectsLimit > limitForRows {
        err = common.ErrLimitExceeded
        err = common.ErrCsvLimitExceeded
        numberOfObjectsLimit = limitForRows
        if params.UseFirstRowForRelations {
            numberOfObjectsLimit++ // because first row is relations, so we need to add plus 1 row

@@ -118,13 +118,11 @@ func (c *CSV) getSnapshotsFromFiles(req *pb.RpcObjectImportRequest,
    err := importSource.Initialize(importPath)
    if err != nil {
        allErrors.Add(fmt.Errorf("failed to extract files: %w", err))
        if allErrors.ShouldAbortImport(len(params.GetPath()), req.Type) {
            return nil
        }
        return nil
    }
    var numberOfFiles int
    if numberOfFiles = importSource.CountFilesWithGivenExtensions([]string{".csv"}); numberOfFiles == 0 {
        allErrors.Add(common.ErrNoObjectsToImport)
        allErrors.Add(common.ErrorBySourceType(importSource))
        return nil
    }
    progress.SetProgressMessage("Start creating snapshots from files")
@ -3,6 +3,7 @@ package csv
|
|||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
|
@ -12,6 +13,7 @@ import (
|
|||
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/template"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/test"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
|
@ -22,10 +24,10 @@ import (
|
|||
|
||||
func TestCsv_GetSnapshotsEmptyFile(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{Path: []string{"testdata/test.csv"}},
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{Path: []string{filepath.Join("testdata", "test.csv")}},
|
||||
},
|
||||
Type: model.Import_Csv,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
|
@ -47,11 +49,11 @@ func TestCsv_GetSnapshotsEmptyFile(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshots(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/Journal.csv"},
|
||||
Path: []string{filepath.Join("testdata", "Journal.csv")},
|
||||
UseFirstRowForRelations: true},
|
||||
},
|
||||
Type: model.Import_Csv,
|
||||
|
@ -80,11 +82,11 @@ func TestCsv_GetSnapshots(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsTable(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/Journal.csv"},
|
||||
Path: []string{filepath.Join("testdata", "Journal.csv")},
|
||||
Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
|
||||
},
|
||||
},
|
||||
|
@ -109,11 +111,11 @@ func TestCsv_GetSnapshotsTable(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsTableUseFirstColumnForRelationsOn(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/Journal.csv"},
|
||||
Path: []string{filepath.Join("testdata", "Journal.csv")},
|
||||
Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@ -150,10 +152,14 @@ func TestCsv_GetSnapshotsTableUseFirstColumnForRelationsOn(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsSemiColon(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{Path: []string{"testdata/semicolon.csv"}, Delimiter: ";", UseFirstRowForRelations: true},
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{filepath.Join("testdata", "semicolon.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
},
|
||||
Type: model.Import_Csv,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
|
@ -170,11 +176,11 @@ func TestCsv_GetSnapshotsSemiColon(t *testing.T) {
|
|||
func TestCsv_GetSnapshotsTranspose(t *testing.T) {
|
||||
t.Run("number of columns equal", func(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/transpose.csv"},
|
||||
Path: []string{filepath.Join("testdata", "transpose.csv")},
|
||||
Delimiter: ";",
|
||||
TransposeRowsAndColumns: true,
|
||||
UseFirstRowForRelations: true,
|
||||
|
@ -211,7 +217,7 @@ func TestCsv_GetSnapshotsTranspose(t *testing.T) {
|
|||
t.Run("number of columns is not equal", func(t *testing.T) {
|
||||
// given
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
|
||||
// when
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
|
@ -243,11 +249,11 @@ func TestCsv_GetSnapshotsTranspose(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsTransposeUseFirstRowForRelationsOff(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/transpose.csv"},
|
||||
Path: []string{filepath.Join("testdata", "transpose.csv")},
|
||||
Delimiter: ";",
|
||||
TransposeRowsAndColumns: true,
|
||||
UseFirstRowForRelations: false,
|
||||
|
@ -271,11 +277,11 @@ func TestCsv_GetSnapshotsTransposeUseFirstRowForRelationsOff(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsUseFirstColumnForRelationsOn(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/Journal.csv"},
|
||||
Path: []string{filepath.Join("testdata", "Journal.csv")},
|
||||
Delimiter: ",",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@ -318,11 +324,11 @@ func assertSnapshotsHaveDetails(t *testing.T, want []string, objects *common.Sna
|
|||
|
||||
func TestCsv_GetSnapshotsUseFirstColumnForRelationsOff(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/Journal.csv"},
|
||||
Path: []string{filepath.Join("testdata", "Journal.csv")},
|
||||
Delimiter: ",",
|
||||
},
|
||||
},
|
||||
|
@ -373,11 +379,11 @@ func TestCsv_GetSnapshotsUseFirstColumnForRelationsOff(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsQuotedStrings(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/quotedstrings.csv"},
|
||||
Path: []string{filepath.Join("testdata", "quotedstrings.csv")},
|
||||
Delimiter: ",",
|
||||
TransposeRowsAndColumns: true,
|
||||
UseFirstRowForRelations: true,
|
||||
|
@ -394,11 +400,11 @@ func TestCsv_GetSnapshotsQuotedStrings(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshotsBigFile(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/bigfile.csv", "testdata/transpose.csv"},
|
||||
Path: []string{filepath.Join("testdata", "bigfile.csv"), filepath.Join("testdata", "transpose.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@ -408,18 +414,18 @@ func TestCsv_GetSnapshotsBigFile(t *testing.T) {
|
|||
}, p)
|
||||
|
||||
assert.NotNil(t, err)
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Csv), common.ErrLimitExceeded))
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Csv), common.ErrCsvLimitExceeded))
|
||||
assert.Nil(t, sn)
|
||||
}
|
||||
|
||||
func TestCsv_GetSnapshotsEmptyFirstLineUseFirstColumnForRelationsOn(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/emptyfirstline.csv"},
|
||||
Path: []string{filepath.Join("testdata", "emptyfirstline.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@ -443,11 +449,11 @@ func TestCsv_GetSnapshotsEmptyFirstLineUseFirstColumnForRelationsOn(t *testing.T
|
|||
func TestCsv_GetSnapshotsEmptyFirstLineUseFirstColumnForRelationsOff(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/emptyfirstline.csv"},
|
||||
Path: []string{filepath.Join("testdata", "emptyfirstline.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: false,
|
||||
},
|
||||
|
@ -489,12 +495,12 @@ func TestCsv_GetSnapshotsEmptyFirstLineUseFirstColumnForRelationsOff(t *testing.
|
|||
func TestCsv_GetSnapshots1000RowsFile(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
// UseFirstRowForRelations is off
|
||||
sn, _ := csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/1000_rows.csv"},
|
||||
Path: []string{filepath.Join("testdata", "1000_rows.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: false,
|
||||
},
|
||||
|
@ -520,7 +526,7 @@ func TestCsv_GetSnapshots1000RowsFile(t *testing.T) {
|
|||
sn, _ = csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/1000_rows.csv"},
|
||||
Path: []string{filepath.Join("testdata", "1000_rows.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@ -595,11 +601,11 @@ func Test_findUniqueRelationAndAddNumber(t *testing.T) {
|
|||
|
||||
func Test_findUniqueRelationWithSpaces(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/relationswithspaces.csv"},
|
||||
Path: []string{filepath.Join("testdata", "relationswithspaces.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@ -637,12 +643,12 @@ func Test_findUniqueRelationWithSpaces(t *testing.T) {
|
|||
|
||||
func TestCsv_GetSnapshots10Relations(t *testing.T) {
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
// UseFirstRowForRelations is off
|
||||
sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/10_relations.csv"},
|
||||
Path: []string{filepath.Join("testdata", "10_relations.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: false,
|
||||
},
|
||||
|
@ -673,7 +679,7 @@ func TestCsv_GetSnapshots10Relations(t *testing.T) {
|
|||
sn, err = csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{"testdata/10_relations.csv"},
|
||||
Path: []string{filepath.Join("testdata", "10_relations.csv")},
|
||||
Delimiter: ";",
|
||||
UseFirstRowForRelations: true,
|
||||
},
|
||||
|
@@ -705,13 +711,13 @@ func TestCsv_GetSnapshotsTableModeDifferentColumnsNumber(t *testing.T) {
 t.Run("test different columns number in file - table mode", func(t *testing.T) {
 // given
 csv := CSV{}
-p := process.NewProgress(pb.ModelProcess_Import)
+p := process.NewNoOp()
 
 // when
 sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
 Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
 CsvParams: &pb.RpcObjectImportRequestCsvParams{
-Path: []string{"testdata/differentcolumnnumber.csv"},
+Path: []string{filepath.Join("testdata", "differentcolumnnumber.csv")},
 Delimiter: ",",
 UseFirstRowForRelations: true,
 Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
@@ -743,13 +749,13 @@ func TestCsv_GetSnapshotsTableModeDifferentColumnsNumber(t *testing.T) {
 t.Run("test different columns number in file - collection mode", func(t *testing.T) {
 // given
 csv := CSV{}
-p := process.NewProgress(pb.ModelProcess_Import)
+p := process.NewNoOp()
 
 // when
 sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
 Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
 CsvParams: &pb.RpcObjectImportRequestCsvParams{
-Path: []string{"testdata/differentcolumnnumber.csv"},
+Path: []string{filepath.Join("testdata", "differentcolumnnumber.csv")},
 Delimiter: ",",
 UseFirstRowForRelations: true,
 Mode: pb.RpcObjectImportRequestCsvParams_COLLECTION,
@ -781,6 +787,86 @@ func TestCsv_GetSnapshotsTableModeDifferentColumnsNumber(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestCSV_GetSnapshots(t *testing.T) {
|
||||
t.Run("file not exist", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
file := "archive.zip"
|
||||
filePath := filepath.Join(dir, file)
|
||||
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{filePath},
|
||||
Delimiter: ",",
|
||||
UseFirstRowForRelations: true,
|
||||
Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
|
||||
},
|
||||
},
|
||||
Type: model.Import_Csv,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.False(t, ce.IsEmpty())
|
||||
})
|
||||
t.Run("no object in archive", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
zipPath := filepath.Join(dir, "empty.zip")
|
||||
err := test.CreateEmptyZip(t, zipPath)
|
||||
assert.Nil(t, err)
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{zipPath},
|
||||
Delimiter: ",",
|
||||
UseFirstRowForRelations: true,
|
||||
Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
|
||||
},
|
||||
},
|
||||
Type: model.Import_Csv,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Csv), common.ErrFileImportNoObjectsInZipArchive))
|
||||
})
|
||||
t.Run("no object in dir", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
csv := CSV{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
|
||||
CsvParams: &pb.RpcObjectImportRequestCsvParams{
|
||||
Path: []string{dir},
|
||||
Delimiter: ",",
|
||||
UseFirstRowForRelations: true,
|
||||
Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
|
||||
},
|
||||
},
|
||||
Type: model.Import_Csv,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Csv), common.ErrFileImportNoObjectsInDirectory))
|
||||
})
|
||||
}
|
||||
func getRelationsNumber(keys []string) int {
|
||||
return lo.CountBy(keys, func(item string) bool {
|
||||
return item != bundle.RelationKeySourceFilePath.String() && item != bundle.RelationKeyLayout.String()
|
||||
@@ -2,6 +2,7 @@ package html
 
 import (
 "context"
+"fmt"
 "io"
 "path/filepath"
 
@@ -119,7 +120,7 @@ func (h *HTML) handleImportPath(path string, allErrors *common.ConvertError) ([]
 }
 var numberOfFiles int
 if numberOfFiles = importSource.CountFilesWithGivenExtensions([]string{".html"}); numberOfFiles == 0 {
-allErrors.Add(common.ErrNoObjectsToImport)
+allErrors.Add(common.ErrorBySourceType(importSource))
 return nil, nil
 }
 return h.getSnapshotsAndRootObjects(path, allErrors, numberOfFiles, importSource)
@@ -160,7 +161,7 @@ func (h *HTML) getBlocksForSnapshot(rc io.ReadCloser, filesSource source.Source,
 }
 blocks, _, err := anymark.HTMLToBlocks(b, "")
 if err != nil {
-return nil, err
+return nil, fmt.Errorf("%w: %s", common.ErrWrongHTMLFormat, err.Error())
 }
 for _, block := range blocks {
 if block.GetFile() != nil {
@ -14,6 +14,7 @@ import (
|
|||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/source"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/test"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
|
@ -28,28 +29,79 @@ func (p *MockTempDirProvider) TempDir() string {
|
|||
}
|
||||
|
||||
func TestHTML_GetSnapshots(t *testing.T) {
|
||||
h := &HTML{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
|
||||
HtmlParams: &pb.RpcObjectImportRequestHtmlParams{Path: []string{"testdata/test.html", "testdata/test"}},
|
||||
},
|
||||
Type: model.Import_Html,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
t.Run("success", func(t *testing.T) {
|
||||
h := &HTML{}
|
||||
p := process.NewNoOp()
|
||||
sn, err := h.GetSnapshots(
|
||||
context.Background(),
|
||||
&pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
|
||||
HtmlParams: &pb.RpcObjectImportRequestHtmlParams{Path: []string{filepath.Join("testdata", "test.html"), filepath.Join("testdata", "test")}},
|
||||
},
|
||||
Type: model.Import_Html,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
},
|
||||
p,
|
||||
)
|
||||
|
||||
assert.NotNil(t, sn)
|
||||
assert.Len(t, sn.Snapshots, 2)
|
||||
assert.Contains(t, sn.Snapshots[0].FileName, "test.html")
|
||||
assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
|
||||
assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))
|
||||
assert.NotNil(t, sn)
|
||||
assert.Len(t, sn.Snapshots, 2)
|
||||
assert.Contains(t, sn.Snapshots[0].FileName, "test.html")
|
||||
assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
|
||||
assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))
|
||||
|
||||
assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
|
||||
assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
|
||||
assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())
|
||||
assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
|
||||
assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
|
||||
assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())
|
||||
|
||||
assert.NotEmpty(t, err)
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Html), common.ErrNoObjectsToImport))
|
||||
assert.NotEmpty(t, err)
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInDirectory))
|
||||
})
|
||||
t.Run("no object in archive", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
zipPath := filepath.Join(dir, "empty.zip")
|
||||
err := test.CreateEmptyZip(t, zipPath)
|
||||
assert.Nil(t, err)
|
||||
html := HTML{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := html.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
|
||||
HtmlParams: &pb.RpcObjectImportRequestHtmlParams{
|
||||
Path: []string{zipPath},
|
||||
},
|
||||
},
|
||||
Type: model.Import_Html,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInZipArchive))
|
||||
})
|
||||
t.Run("no object in dir", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
html := HTML{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := html.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
|
||||
HtmlParams: &pb.RpcObjectImportRequestHtmlParams{
|
||||
Path: []string{dir},
|
||||
},
|
||||
},
|
||||
Type: model.Import_Html,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInDirectory))
|
||||
})
|
||||
}
|
||||
|
||||
func TestHTML_provideFileName(t *testing.T) {
|
||||
|
@ -73,9 +125,10 @@ func TestHTML_provideFileName(t *testing.T) {
|
|||
currentDir, err := os.Getwd()
|
||||
assert.Nil(t, err)
|
||||
source := source.GetSource(currentDir)
|
||||
filePath := filepath.Join("testdata", "test")
|
||||
|
||||
// when
|
||||
absPath, err := filepath.Abs("testdata/test")
|
||||
absPath, err := filepath.Abs(filePath)
|
||||
assert.Nil(t, err)
|
||||
newFileName, _, err := common.ProvideFileName(absPath, source, currentDir, h.tempDirProvider)
|
||||
|
||||
|
@ -89,13 +142,14 @@ func TestHTML_provideFileName(t *testing.T) {
|
|||
currentDir, err := os.Getwd()
|
||||
assert.Nil(t, err)
|
||||
source := source.GetSource(currentDir)
|
||||
filePath := filepath.Join("testdata", "test")
|
||||
|
||||
// when
|
||||
newFileName, _, err := common.ProvideFileName("testdata/test", source, currentDir, h.tempDirProvider)
|
||||
newFileName, _, err := common.ProvideFileName(filePath, source, currentDir, h.tempDirProvider)
|
||||
|
||||
// then
|
||||
assert.Nil(t, err)
|
||||
absPath, err := filepath.Abs("testdata/test")
|
||||
absPath, err := filepath.Abs(filePath)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, absPath, newFileName)
|
||||
})
|
||||
|
|
|
@@ -248,7 +248,7 @@ func (i *Import) importFromExternalSource(ctx context.Context, req *ImportReques
 }
 return int64(len(details)), nil
 }
-return 0, common.ErrNoObjectsToImport
+return 0, common.ErrNoSnapshotToImport
 }
 
 func (i *Import) finishImportProcess(returnedErr error, req *ImportRequest) {
@@ -266,7 +266,7 @@ func (i *Import) provideNotification(returnedErr error, progress process.Progres
 Space: req.SpaceId,
 Payload: &model.NotificationPayloadOfImport{Import: &model.NotificationImport{
 ProcessId: progress.Id(),
-ErrorCode: common.GetImportErrorCode(returnedErr),
+ErrorCode: common.GetImportNotificationErrorCode(returnedErr),
 ImportType: req.Type,
 SpaceId: req.SpaceId,
 }},
@@ -275,15 +275,15 @@ func (i *Import) provideNotification(returnedErr error, progress process.Progres
 
 func shouldReturnError(e error, res *common.Response, req *pb.RpcObjectImportRequest) bool {
 return (e != nil && req.Mode != pb.RpcObjectImportRequest_IGNORE_ERRORS) ||
-errors.Is(e, common.ErrFailedToReceiveListOfObjects) || errors.Is(e, common.ErrLimitExceeded) ||
-(errors.Is(e, common.ErrNoObjectsToImport) && (res == nil || len(res.Snapshots) == 0)) || // return error only if we don't have object to import
+errors.Is(e, common.ErrNotionServerExceedRateLimit) || errors.Is(e, common.ErrCsvLimitExceeded) ||
+(common.IsNoObjectError(e) && (res == nil || len(res.Snapshots) == 0)) || // return error only if we don't have object to import
 errors.Is(e, common.ErrCancel)
 }
 
 func (i *Import) setupProgressBar(req *ImportRequest) {
-progressBarType := pb.ModelProcess_Import
+var progressBarType pb.IsModelProcessMessage = &pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}}
 if req.IsMigration {
-progressBarType = pb.ModelProcess_Migration
+progressBarType = &pb.ModelProcessMessageOfMigration{Migration: &pb.ModelProcessMigration{}}
 }
 var progress process.Progress
 if req.GetNoProgress() {
@@ -320,14 +320,22 @@ func (i *Import) ValidateNotionToken(
 return tv.Validate(ctx, req.GetToken())
 }
 
-func (i *Import) ImportWeb(ctx context.Context, req *pb.RpcObjectImportRequest) (string, *types.Struct, error) {
-progress := process.NewProgress(pb.ModelProcess_Import)
-defer progress.Finish(nil)
+func (i *Import) ImportWeb(ctx context.Context, req *ImportRequest) (string, *types.Struct, error) {
+if req.Progress == nil {
+i.setupProgressBar(req)
+}
+defer req.Progress.Finish(nil)
+if i.s != nil {
+err := i.s.ProcessAdd(req.Progress)
+if err != nil {
+return "", nil, fmt.Errorf("failed to add process")
+}
+}
 allErrors := common.NewError(0)
 
-progress.SetProgressMessage("Parse url")
+req.Progress.SetProgressMessage("Parse url")
 w := i.converters[web.Name]
-res, err := w.GetSnapshots(ctx, req, progress)
+res, err := w.GetSnapshots(ctx, req.RpcObjectImportRequest, req.Progress)
 
 if err != nil {
 return "", nil, err.Error()
@@ -336,8 +344,8 @@ func (i *Import) ImportWeb(ctx context.Context, req *pb.RpcObjectImportRequest)
 return "", nil, fmt.Errorf("snpashots are empty")
 }
 
-progress.SetProgressMessage("Create objects")
-details, _ := i.createObjects(ctx, res, progress, req, allErrors, objectorigin.None())
+req.Progress.SetProgressMessage("Create objects")
+details, _ := i.createObjects(ctx, res, req.Progress, req.RpcObjectImportRequest, allErrors, objectorigin.None())
 if !allErrors.IsEmpty() {
 return "", nil, fmt.Errorf("couldn't create objects")
 }
@ -14,6 +14,7 @@ import (
|
|||
"go.uber.org/mock/gomock"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/objectid/mock_objectid"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/mock_common"
|
||||
|
@ -375,7 +376,7 @@ func Test_ImportExternalPluginError(t *testing.T) {
|
|||
}
|
||||
res := i.Import(context.Background(), importRequest)
|
||||
assert.NotNil(t, res)
|
||||
assert.Contains(t, res.Err.Error(), common.ErrNoObjectsToImport.Error())
|
||||
assert.Contains(t, res.Err.Error(), common.ErrNoSnapshotToImport.Error())
|
||||
}
|
||||
|
||||
func Test_ListImports(t *testing.T) {
|
||||
|
@ -402,9 +403,12 @@ func Test_ImportWebNoParser(t *testing.T) {
|
|||
creator := mock_objectcreator.NewMockService(t)
|
||||
i.oc = creator
|
||||
i.idProvider = mock_objectid.NewMockIdAndKeyProvider(t)
|
||||
_, _, err := i.ImportWeb(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
_, _, err := i.ImportWeb(context.Background(), &ImportRequest{
|
||||
RpcObjectImportRequest: &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
},
|
||||
Progress: process.NewNoOp(),
|
||||
})
|
||||
|
||||
assert.NotNil(t, err)
|
||||
|
@ -430,9 +434,12 @@ func Test_ImportWebFailedToParse(t *testing.T) {
|
|||
}
|
||||
parsers.RegisterFunc(new)
|
||||
|
||||
_, _, err := i.ImportWeb(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
_, _, err := i.ImportWeb(context.Background(), &ImportRequest{
|
||||
RpcObjectImportRequest: &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
},
|
||||
Progress: process.NewNoOp(),
|
||||
})
|
||||
|
||||
assert.NotNil(t, err)
|
||||
|
@ -472,9 +479,12 @@ func Test_ImportWebSuccess(t *testing.T) {
|
|||
}
|
||||
parsers.RegisterFunc(new)
|
||||
|
||||
_, _, err := i.ImportWeb(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
_, _, err := i.ImportWeb(context.Background(), &ImportRequest{
|
||||
RpcObjectImportRequest: &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
},
|
||||
Progress: process.NewNoOp(),
|
||||
})
|
||||
|
||||
assert.Nil(t, err)
|
||||
|
@ -514,9 +524,12 @@ func Test_ImportWebFailedToCreateObject(t *testing.T) {
|
|||
}
|
||||
parsers.RegisterFunc(new)
|
||||
|
||||
_, _, err := i.ImportWeb(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
_, _, err := i.ImportWeb(context.Background(), &ImportRequest{
|
||||
RpcObjectImportRequest: &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfBookmarksParams{BookmarksParams: &pb.RpcObjectImportRequestBookmarksParams{Url: "http://example.com"}},
|
||||
UpdateExistingObjects: true,
|
||||
},
|
||||
Progress: process.NewNoOp(),
|
||||
})
|
||||
|
||||
assert.NotNil(t, err)
|
||||
|
@ -556,7 +569,7 @@ func Test_ImportCancelError(t *testing.T) {
|
|||
func Test_ImportNoObjectToImportError(t *testing.T) {
|
||||
i := Import{}
|
||||
converter := mock_common.NewMockConverter(t)
|
||||
e := common.NewFromError(common.ErrNoObjectsToImport, pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
e := common.NewFromError(common.ErrNoObjectInIntegration, pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
converter.EXPECT().GetSnapshots(mock.Anything, mock.Anything, mock.Anything).Return(&common.Response{Snapshots: nil}, e).Times(1)
|
||||
i.converters = make(map[string]common.Converter, 0)
|
||||
i.converters["Notion"] = converter
|
||||
|
@ -581,13 +594,13 @@ func Test_ImportNoObjectToImportError(t *testing.T) {
|
|||
res := i.Import(context.Background(), importRequest)
|
||||
|
||||
assert.NotNil(t, res.Err)
|
||||
assert.True(t, errors.Is(res.Err, common.ErrNoObjectsToImport))
|
||||
assert.True(t, errors.Is(res.Err, common.ErrNoObjectInIntegration))
|
||||
}
|
||||
|
||||
func Test_ImportNoObjectToImportErrorModeAllOrNothing(t *testing.T) {
|
||||
i := Import{}
|
||||
converter := mock_common.NewMockConverter(t)
|
||||
e := common.NewFromError(common.ErrNoObjectsToImport, pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
e := common.NewFromError(common.ErrNoObjectInIntegration, pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
converter.EXPECT().GetSnapshots(mock.Anything, mock.Anything, mock.Anything).Return(&common.Response{Snapshots: []*common.Snapshot{{
|
||||
Snapshot: &pb.ChangeSnapshot{
|
||||
Data: &model.SmartBlockSnapshotBase{
|
||||
|
@ -627,12 +640,12 @@ func Test_ImportNoObjectToImportErrorModeAllOrNothing(t *testing.T) {
|
|||
res := i.Import(context.Background(), importRequest)
|
||||
|
||||
assert.NotNil(t, res.Err)
|
||||
assert.True(t, errors.Is(res.Err, common.ErrNoObjectsToImport))
|
||||
assert.True(t, errors.Is(res.Err, common.ErrNoObjectInIntegration))
|
||||
}
|
||||
|
||||
func Test_ImportNoObjectToImportErrorIgnoreErrorsMode(t *testing.T) {
|
||||
i := Import{}
|
||||
e := common.NewFromError(common.ErrNoObjectsToImport, pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
e := common.NewFromError(common.ErrNoObjectInIntegration, pb.RpcObjectImportRequest_IGNORE_ERRORS)
|
||||
converter := mock_common.NewMockConverter(t)
|
||||
converter.EXPECT().GetSnapshots(mock.Anything, mock.Anything, mock.Anything).Return(&common.Response{Snapshots: []*common.Snapshot{{
|
||||
Snapshot: &pb.ChangeSnapshot{
|
||||
|
@ -682,13 +695,13 @@ func Test_ImportNoObjectToImportErrorIgnoreErrorsMode(t *testing.T) {
|
|||
res := i.Import(context.Background(), importRequest)
|
||||
|
||||
assert.NotNil(t, res.Err)
|
||||
assert.True(t, errors.Is(res.Err, common.ErrNoObjectsToImport))
|
||||
assert.True(t, errors.Is(res.Err, common.ErrNoObjectInIntegration))
|
||||
}
|
||||
|
||||
func Test_ImportErrLimitExceeded(t *testing.T) {
|
||||
i := Import{}
|
||||
converter := mock_common.NewMockConverter(t)
|
||||
e := common.NewFromError(common.ErrLimitExceeded, pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
e := common.NewFromError(common.ErrCsvLimitExceeded, pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
converter.EXPECT().GetSnapshots(mock.Anything, mock.Anything, mock.Anything).Return(&common.Response{Snapshots: []*common.Snapshot{{
|
||||
Snapshot: &pb.ChangeSnapshot{
|
||||
Data: &model.SmartBlockSnapshotBase{
|
||||
|
@ -729,13 +742,13 @@ func Test_ImportErrLimitExceeded(t *testing.T) {
|
|||
res := i.Import(context.Background(), importRequest)
|
||||
|
||||
assert.NotNil(t, res.Err)
|
||||
assert.True(t, errors.Is(res.Err, common.ErrLimitExceeded))
|
||||
assert.True(t, errors.Is(res.Err, common.ErrCsvLimitExceeded))
|
||||
}
|
||||
|
||||
func Test_ImportErrLimitExceededIgnoreErrorMode(t *testing.T) {
|
||||
i := Import{}
|
||||
converter := mock_common.NewMockConverter(t)
|
||||
e := common.NewFromError(common.ErrLimitExceeded, pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
e := common.NewFromError(common.ErrCsvLimitExceeded, pb.RpcObjectImportRequest_ALL_OR_NOTHING)
|
||||
converter.EXPECT().GetSnapshots(mock.Anything, mock.Anything, mock.Anything).Return(&common.Response{Snapshots: []*common.Snapshot{{
|
||||
Snapshot: &pb.ChangeSnapshot{
|
||||
Data: &model.SmartBlockSnapshotBase{
|
||||
|
@ -776,7 +789,7 @@ func Test_ImportErrLimitExceededIgnoreErrorMode(t *testing.T) {
|
|||
res := i.Import(context.Background(), importRequest)
|
||||
|
||||
assert.NotNil(t, res.Err)
|
||||
assert.True(t, errors.Is(res.Err, common.ErrLimitExceeded))
|
||||
assert.True(t, errors.Is(res.Err, common.ErrCsvLimitExceeded))
|
||||
}
|
||||
|
||||
func TestImport_replaceRelationKeyWithNew(t *testing.T) {
|
||||
|
|
|
@ -51,7 +51,7 @@ func (m *mdConverter) processFiles(importPath string, allErrors *common.ConvertE
|
|||
}
|
||||
}
|
||||
if importSource.CountFilesWithGivenExtensions([]string{".md"}) == 0 {
|
||||
allErrors.Add(common.ErrNoObjectsToImport)
|
||||
allErrors.Add(common.ErrorBySourceType(importSource))
|
||||
return nil
|
||||
}
|
||||
fileInfo := m.getFileInfo(importSource, allErrors)
|
||||
|
|
|
@ -24,15 +24,18 @@ func Test_processFiles(t *testing.T) {
|
|||
t.Run("imported directory include mov and pdf files - md file has file blocks", func(t *testing.T) {
|
||||
// given
|
||||
converter := newMDConverter(&MockTempDir{})
|
||||
_, err := os.Create("./testdata/test.pdf")
|
||||
pdfFile := filepath.Join("testdata", "test.pdf")
|
||||
_, err := os.Create(pdfFile)
|
||||
assert.Nil(t, err)
|
||||
defer os.Remove("./testdata/test.pdf")
|
||||
_, err = os.Create("./testdata/test.mov")
|
||||
defer os.Remove(pdfFile)
|
||||
|
||||
movFile := filepath.Join("testdata", "test.mov")
|
||||
_, err = os.Create(movFile)
|
||||
assert.Nil(t, err)
|
||||
defer os.Remove("./testdata/test.mov")
|
||||
defer os.Remove(movFile)
|
||||
|
||||
workingDir, err := os.Getwd()
|
||||
absolutePath := filepath.Join(workingDir, "./testdata")
|
||||
absolutePath := filepath.Join(workingDir, "testdata")
|
||||
source := source.GetSource(absolutePath)
|
||||
|
||||
// when
|
||||
|
@ -62,10 +65,10 @@ func Test_processFiles(t *testing.T) {
|
|||
t.Run("imported directory include without mov and pdf files - no file blocks", func(t *testing.T) {
|
||||
// given
|
||||
converter := newMDConverter(&MockTempDir{})
|
||||
source := source.GetSource("./testdata")
|
||||
source := source.GetSource("testdata")
|
||||
workingDir, err := os.Getwd()
|
||||
assert.Nil(t, err)
|
||||
absolutePath := filepath.Join(workingDir, "./testdata")
|
||||
absolutePath := filepath.Join(workingDir, "testdata")
|
||||
|
||||
// when
|
||||
files := converter.processFiles(absolutePath, common.NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS), source)
|
||||
|
|
|
@ -2,12 +2,15 @@ package markdown
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/test"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
|
@ -19,7 +22,7 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
|
|||
// given
|
||||
testDirectory := setupTestDirectory(t)
|
||||
h := &Markdown{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
|
||||
// when
|
||||
sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
|
@ -39,7 +42,7 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
|
|||
subPageId string
|
||||
)
|
||||
for _, snapshot := range sn.Snapshots {
|
||||
if snapshot.FileName == filepath.Join(testDirectory, "test_database/test.md") {
|
||||
if snapshot.FileName == filepath.Join(testDirectory, "test_database", "test.md") {
|
||||
subPageId = snapshot.Id
|
||||
break
|
||||
}
|
||||
|
@ -55,12 +58,11 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
|
|||
}
|
||||
assert.True(t, found)
|
||||
})
|
||||
|
||||
t.Run("no object error", func(t *testing.T) {
|
||||
// given
|
||||
testDirectory := t.TempDir()
|
||||
h := &Markdown{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
|
||||
// when
|
||||
sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
|
@ -81,7 +83,7 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
|
|||
tempDirProvider := &MockTempDir{}
|
||||
converter := newMDConverter(tempDirProvider)
|
||||
h := &Markdown{blockConverter: converter}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
p := process.NewNoOp()
|
||||
|
||||
// when
|
||||
sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
|
@ -113,6 +115,30 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
|
|||
}
|
||||
assert.True(t, found)
|
||||
})
|
||||
t.Run("no object in archive", func(t *testing.T) {
|
||||
// given
|
||||
testDirectory := t.TempDir()
|
||||
zipPath := filepath.Join(testDirectory, "empty.zip")
|
||||
err := test.CreateEmptyZip(t, zipPath)
|
||||
assert.Nil(t, err)
|
||||
|
||||
h := &Markdown{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
sn, ce := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfMarkdownParams{
|
||||
MarkdownParams: &pb.RpcObjectImportRequestMarkdownParams{Path: []string{zipPath}},
|
||||
},
|
||||
Type: model.Import_Markdown,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
}, p)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.Nil(t, sn)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Markdown), common.ErrFileImportNoObjectsInZipArchive))
|
||||
})
|
||||
}
|
||||
|
||||
func buildExpectedTree(fileNameToObjectId map[string]string, provider *MockTempDir, rootId string) *blockbuilder.Block {
|
||||
|
|
|
@@ -174,7 +174,7 @@ func Test_GetTextBlocksDateMention(t *testing.T) {
 assert.Equal(t, bl.Blocks[0].GetText().Style, model.BlockContentText_Paragraph)
 assert.Len(t, bl.Blocks[0].GetText().Marks.Marks, 1)
 assert.Equal(t, bl.Blocks[0].GetText().Marks.Marks[0].Type, model.BlockContentTextMark_Mention)
-assert.Equal(t, bl.Blocks[0].GetText().Marks.Marks[0].Param, "_date_2022-11-14")
+assert.Equal(t, bl.Blocks[0].GetText().Marks.Marks[0].Param, "_date_2022-11-14-00-00-00")
 }
 
 func Test_GetTextBlocksLinkPreview(t *testing.T) {
@@ -2,13 +2,12 @@ package block
 
 import (
 "strings"
-"time"
 
 "github.com/globalsign/mgo/bson"
 
 "github.com/anyproto/anytype-heart/core/block/import/notion/api"
-"github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
 "github.com/anyproto/anytype-heart/pkg/lib/pb/model"
+"github.com/anyproto/anytype-heart/util/dateutil"
 textUtil "github.com/anyproto/anytype-heart/util/text"
 )
 
@@ -240,7 +239,7 @@ func (t *TextObject) handleDateMention(rt api.RichText,
 if rt.Mention.Date.End != "" {
 textDate = rt.Mention.Date.End
 }
-date, err := time.Parse(DateMentionTimeFormat, textDate)
+date, err := dateutil.ParseDateId(textDate)
 if err != nil {
 return nil
 }
@@ -253,7 +252,7 @@ func (t *TextObject) handleDateMention(rt api.RichText,
 To: int32(to),
 },
 Type: model.BlockContentTextMark_Mention,
-Param: addr.TimeToID(date),
+Param: dateutil.TimeToDateId(date),
 },
 }
 }
@@ -3,6 +3,8 @@ package client
 import (
 "encoding/json"
+"fmt"
 
+"github.com/anyproto/anytype-heart/core/block/import/common"
 )
 
 type NotionErrorResponse struct {
@@ -17,5 +19,11 @@ func TransformHTTPCodeToError(response []byte) error {
 if err := json.Unmarshal(response, &notionErr); err != nil {
 return nil
 }
+if notionErr.Status >= 500 {
+return fmt.Errorf("%w: %s", common.ErrNotionServerIsUnavailable, notionErr.Message)
+}
+if notionErr.Status == 429 {
+return fmt.Errorf("%w: %s", common.ErrNotionServerExceedRateLimit, notionErr.Message)
+}
 return fmt.Errorf("status: %d, code: %s, message: %s", notionErr.Status, notionErr.Code, notionErr.Message)
 }
@@ -34,7 +34,7 @@ func Test_GetDatabaseSuccess(t *testing.T) {
 assert.Nil(t, err)
 
 ds := database.New(nil)
-progress := process.NewProgress(pb.ModelProcess_Import)
+progress := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
 downloader := mock_files.NewMockDownloader(t)
 downloader.EXPECT().QueueFileForDownload(mock.Anything).Return(nil, true)
 databases, _, ce := ds.GetDatabase(context.Background(), pb.RpcObjectImportRequest_ALL_OR_NOTHING, db, progress, api.NewNotionImportContext(), downloader)
@@ -64,7 +64,6 @@ func (n *Notion) GetSnapshots(ctx context.Context, req *pb.RpcObjectImportReques
 
 // always add this error because it's mean that we need to return error to user, even in case IGNORE_ERRORS is turned on
-// see shouldReturnError
 ce.Add(common.ErrFailedToReceiveListOfObjects)
 log.With("error", ce.Error()).With("pages", len(pages)).With("dbs", len(db)).Error("import from notion failed")
 return nil, ce
 }
@@ -76,7 +75,7 @@ func (n *Notion) GetSnapshots(ctx context.Context, req *pb.RpcObjectImportReques
 return nil, common.NewFromError(common.ErrCancel, req.Mode)
 }
 if len(db) == 0 && len(pages) == 0 {
-return nil, common.NewFromError(common.ErrNoObjectsToImport, req.Mode)
+return nil, common.NewFromError(common.ErrNoObjectInIntegration, req.Mode)
 }
 
 fileDownloader := files.NewFileDownloader(progress)
@ -1,15 +1,112 @@
|
|||
package notion
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/notion/api/client"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/notion/api/database"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/notion/api/page"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/notion/api/property"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/notion/api/search"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
)
|
||||
|
||||
func TestNotion_GetSnapshots(t *testing.T) {
|
||||
t.Run("internal error from Notion", func(t *testing.T) {
|
||||
// given
|
||||
converter := &Notion{}
|
||||
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
w.Write([]byte(`{"object":"error","status":500,"code":"internal_error","message":"internal server error"}`))
|
||||
}))
|
||||
defer s.Close()
|
||||
c := client.NewClient()
|
||||
c.BasePath = s.URL
|
||||
converter.search = search.New(c)
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := converter.GetSnapshots(
|
||||
context.Background(),
|
||||
&pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfNotionParams{NotionParams: &pb.RpcObjectImportRequestNotionParams{ApiKey: "key"}},
|
||||
Type: model.Import_Markdown,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
},
|
||||
p,
|
||||
)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Notion), common.ErrNotionServerIsUnavailable))
|
||||
})
|
||||
t.Run("rate limit error from Notion", func(t *testing.T) {
|
||||
// given
|
||||
converter := &Notion{}
|
||||
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusTooManyRequests)
|
||||
w.Write([]byte(`{"object":"error","status":429,"code":"rate_limit_error","message":"rate limit error"}`))
|
||||
}))
|
||||
defer s.Close()
|
||||
c := client.NewClient()
|
||||
c.BasePath = s.URL
|
||||
converter.search = search.New(c)
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := converter.GetSnapshots(
|
||||
context.Background(),
|
||||
&pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfNotionParams{NotionParams: &pb.RpcObjectImportRequestNotionParams{ApiKey: "key"}},
|
||||
Type: model.Import_Markdown,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
},
|
||||
p,
|
||||
)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Notion), common.ErrNotionServerExceedRateLimit))
|
||||
})
|
||||
t.Run("no objects in integration", func(t *testing.T) {
|
||||
// given
|
||||
converter := &Notion{}
|
||||
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte(`{"object":"list","results":[]}`))
|
||||
}))
|
||||
defer s.Close()
|
||||
c := client.NewClient()
|
||||
c.BasePath = s.URL
|
||||
converter.search = search.New(c)
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := converter.GetSnapshots(
|
||||
context.Background(),
|
||||
&pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfNotionParams{NotionParams: &pb.RpcObjectImportRequestNotionParams{ApiKey: "key"}},
|
||||
Type: model.Import_Markdown,
|
||||
Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
|
||||
},
|
||||
p,
|
||||
)
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Notion), common.ErrNoObjectInIntegration))
|
||||
})
|
||||
}
|
||||
|
||||
func TestNotion_getUniqueProperties(t *testing.T) {
|
||||
t.Run("Page and Database have the same property - 1 unique item", func(t *testing.T) {
|
||||
// given
|
||||
|
|
|
@@ -39,7 +39,6 @@ const (
 )
 
 var ErrNotAnyBlockExtension = errors.New("not JSON or PB extension")
-var ErrWrongFormat = errors.New("wrong PB or JSON format")
 
 type Pb struct {
 service *collection.Service
@@ -172,7 +171,7 @@ func (p *Pb) extractFiles(importPath string, importSource source.Source) error {
 return err
 }
 if importSource.CountFilesWithGivenExtensions([]string{".pb", ".json"}) == 0 {
-return common.ErrNoObjectsToImport
+return common.ErrorBySourceType(importSource)
 }
 return nil
 }
@@ -268,10 +267,10 @@ func (p *Pb) makeSnapshot(name, profileID, path string,
 if errors.Is(errGS, ErrNotAnyBlockExtension) {
 return nil, nil
 }
-return nil, errGS
+return nil, fmt.Errorf("%w: %s", common.ErrPbNotAnyBlockFormat, errGS.Error())
 }
 if valid := p.isSnapshotValid(snapshot); !valid {
-return nil, fmt.Errorf("snapshot is not valid")
+return nil, fmt.Errorf("%w: snapshot is not valid", common.ErrPbNotAnyBlockFormat)
 }
 id := uuid.New().String()
 id, err := p.normalizeSnapshot(snapshot, id, profileID, path, isMigration, pbFiles)
@@ -293,7 +292,7 @@ func (p *Pb) getSnapshotFromFile(rd io.ReadCloser, name string) (*pb.SnapshotWit
 snapshot := &pb.SnapshotWithType{}
 um := jsonpb.Unmarshaler{}
 if uErr := um.Unmarshal(rd, snapshot); uErr != nil {
-return nil, ErrWrongFormat
+return nil, fmt.Errorf("PB:GetSnapshot %w", uErr)
 }
 return snapshot, nil
 }
@@ -304,7 +303,7 @@ func (p *Pb) getSnapshotFromFile(rd io.ReadCloser, name string) (*pb.SnapshotWit
 return nil, err
 }
 if err = snapshot.Unmarshal(data); err != nil {
-return nil, ErrWrongFormat
+return nil, fmt.Errorf("PB:GetSnapshot %w", err)
 }
 return snapshot, nil
 }
@ -4,6 +4,7 @@ import (
|
|||
"archive/zip"
|
||||
"bufio"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
|
@ -15,9 +16,12 @@ import (
|
|||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/test"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
)
|
||||
|
||||
func Test_GetSnapshotsSuccess(t *testing.T) {
|
||||
|
@ -26,7 +30,7 @@ func Test_GetSnapshotsSuccess(t *testing.T) {
|
|||
defer os.RemoveAll(path)
|
||||
wr, err := newZipWriter(path)
|
||||
assert.NoError(t, err)
|
||||
f, err := os.Open("testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb")
|
||||
f, err := os.Open(filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"))
|
||||
reader := bufio.NewReader(f)
|
||||
|
||||
assert.NoError(t, err)
|
||||
|
@ -43,7 +47,7 @@ func Test_GetSnapshotsSuccess(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 0,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.Nil(t, ce)
|
||||
assert.Len(t, res.Snapshots, 2)
|
||||
|
@ -63,7 +67,7 @@ func Test_GetSnapshotsFailedReadZip(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 0,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.NotNil(t, ce)
|
||||
}
|
||||
|
@ -74,7 +78,7 @@ func Test_GetSnapshotsFailedToGetSnapshot(t *testing.T) {
|
|||
defer os.RemoveAll(path)
|
||||
wr, err := newZipWriter(path)
|
||||
assert.NoError(t, err)
|
||||
f, err := os.Open("testdata/test.pb")
|
||||
f, err := os.Open(filepath.Join("testdata", "test.pb"))
|
||||
reader := bufio.NewReader(f)
|
||||
|
||||
assert.NoError(t, err)
|
||||
|
@ -90,16 +94,46 @@ func Test_GetSnapshotsFailedToGetSnapshot(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 0,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.NotNil(t, ce)
|
||||
assert.False(t, ce.IsEmpty())
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrPbNotAnyBlockFormat))
|
||||
}
|
||||
|
||||
func Test_GetSnapshotsEmptySnapshot(t *testing.T) {
|
||||
path, err := ioutil.TempDir("", "")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(path)
|
||||
wr, err := newZipWriter(path)
|
||||
assert.NoError(t, err)
|
||||
f, err := os.Open(filepath.Join("testdata", "emptysnapshot.pb.json"))
|
||||
reader := bufio.NewReader(f)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.NoError(t, wr.WriteFile("emptysnapshot.pb.json", reader))
|
||||
assert.NoError(t, wr.Close())
|
||||
|
||||
p := &Pb{}
|
||||
|
||||
_, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
|
||||
Path: []string{wr.Path()},
|
||||
}},
|
||||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 0,
|
||||
}, process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}}))
|
||||
|
||||
assert.NotNil(t, ce)
|
||||
assert.False(t, ce.IsEmpty())
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrPbNotAnyBlockFormat))
|
||||
}
|
||||
|
||||
func Test_GetSnapshotsFailedToGetSnapshotForTwoFiles(t *testing.T) {
|
||||
p := &Pb{}
|
||||
|
||||
paths := []string{"testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb", "testdata/test.pb"}
|
||||
paths := []string{filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"), filepath.Join("testdata", "test.pb")}
|
||||
// ALL_OR_NOTHING mode
|
||||
res, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
|
||||
|
@ -108,7 +142,7 @@ func Test_GetSnapshotsFailedToGetSnapshotForTwoFiles(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 0,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.NotNil(t, ce)
|
||||
assert.Nil(t, res)
|
||||
|
@ -121,7 +155,7 @@ func Test_GetSnapshotsFailedToGetSnapshotForTwoFiles(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 1,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.NotNil(t, ce)
|
||||
assert.NotNil(t, res.Snapshots)
|
||||
|
@ -132,7 +166,7 @@ func Test_GetSnapshotsFailedToGetSnapshotForTwoFiles(t *testing.T) {
|
|||
func Test_GetSnapshotsWithoutRootCollection(t *testing.T) {
|
||||
p := &Pb{}
|
||||
|
||||
path := "testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"
|
||||
path := filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb")
|
||||
res, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
|
||||
Path: []string{path},
|
||||
|
@ -141,7 +175,7 @@ func Test_GetSnapshotsWithoutRootCollection(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 0,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.Nil(t, ce)
|
||||
assert.NotNil(t, res.Snapshots)
|
||||
|
@ -155,13 +189,13 @@ func Test_GetSnapshotsSkipFileWithoutExtension(t *testing.T) {
|
|||
wr, err := newZipWriter(path)
|
||||
assert.NoError(t, err)
|
||||
|
||||
f, err := os.Open("testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb")
|
||||
f, err := os.Open(filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"))
|
||||
assert.NoError(t, err)
|
||||
reader := bufio.NewReader(f)
|
||||
|
||||
assert.NoError(t, wr.WriteFile("bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb", reader))
|
||||
|
||||
f, err = os.Open("testdata/test")
|
||||
f, err = os.Open(filepath.Join("testdata", "test"))
|
||||
assert.NoError(t, err)
|
||||
reader = bufio.NewReader(f)
|
||||
|
||||
|
@ -177,7 +211,7 @@ func Test_GetSnapshotsSkipFileWithoutExtension(t *testing.T) {
|
|||
UpdateExistingObjects: false,
|
||||
Type: 0,
|
||||
Mode: 1,
|
||||
}, process.NewProgress(pb.ModelProcess_Import))
|
||||
}, process.NewNoOp())
|
||||
|
||||
assert.Nil(t, ce)
|
||||
assert.NotNil(t, res.Snapshots)
|
||||
|
@ -187,6 +221,44 @@ func Test_GetSnapshotsSkipFileWithoutExtension(t *testing.T) {
|
|||
assert.Contains(t, res.Snapshots[1].FileName, rootCollectionName)
|
||||
}
|
||||
|
||||
func TestPb_GetSnapshots(t *testing.T) {
|
||||
t.Run("no objects in dir", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
p := &Pb{}
|
||||
|
||||
// when
|
||||
_, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
|
||||
Path: []string{dir},
|
||||
}},
|
||||
}, process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}}))
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInDirectory))
|
||||
})
|
||||
t.Run("no objects in archive", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
p := &Pb{}
|
||||
zipPath := filepath.Join(dir, "empty.zip")
|
||||
err := test.CreateEmptyZip(t, zipPath)
|
||||
assert.Nil(t, err)
|
||||
|
||||
// when
|
||||
_, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
|
||||
Path: []string{zipPath},
|
||||
}},
|
||||
}, process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}}))
|
||||
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInZipArchive))
|
||||
})
|
||||
}
|
||||
|
||||
func newZipWriter(path string) (*zipWriter, error) {
|
||||
filename := filepath.Join(path, "Anytype"+strconv.FormatInt(rand.Int63(), 10)+".zip")
|
||||
f, err := os.Create(filename)
|
||||
|
|
core/block/import/pb/testdata/emptysnapshot.pb.json (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
+{
+"snapshot": {}
+}
@@ -113,7 +113,7 @@ func (t *TXT) handleImportPath(p string, pathsCount int, allErrors *common.Conve
 }
 var numberOfFiles int
 if numberOfFiles = importSource.CountFilesWithGivenExtensions([]string{".txt"}); numberOfFiles == 0 {
-allErrors.Add(common.ErrNoObjectsToImport)
+allErrors.Add(common.ErrorBySourceType(importSource))
 return nil, nil
 }
 snapshots := make([]*common.Snapshot, 0, numberOfFiles)
@ -3,11 +3,13 @@ package txt
|
|||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common"
|
||||
"github.com/anyproto/anytype-heart/core/block/import/common/test"
|
||||
"github.com/anyproto/anytype-heart/core/block/process"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
|
@ -16,40 +18,82 @@ import (
|
|||
)
|
||||
|
||||
func TestTXT_GetSnapshots(t *testing.T) {
|
||||
h := &TXT{}
|
||||
p := process.NewProgress(pb.ModelProcess_Import)
|
||||
sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
|
||||
TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{"testdata/test.txt", "testdata/test"}},
|
||||
},
|
||||
Type: 4,
|
||||
Mode: 1,
|
||||
}, p)
|
||||
t.Run("success", func(t *testing.T) {
|
||||
h := &TXT{}
|
||||
p := process.NewNoOp()
|
||||
sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
|
||||
TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{filepath.Join("testdata", "test.txt"), filepath.Join("testdata", "test")}},
|
||||
},
|
||||
Type: 4,
|
||||
Mode: 1,
|
||||
}, p)
|
||||
|
||||
assert.NotNil(t, err)
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Txt), common.ErrNoObjectsToImport))
|
||||
assert.NotNil(t, sn)
|
||||
assert.Len(t, sn.Snapshots, 2)
|
||||
assert.Contains(t, sn.Snapshots[0].FileName, "test.txt")
|
||||
assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
|
||||
assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))
|
||||
assert.NotNil(t, err)
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Txt), common.ErrFileImportNoObjectsInDirectory))
|
||||
assert.NotNil(t, sn)
|
||||
assert.Len(t, sn.Snapshots, 2)
|
||||
assert.Contains(t, sn.Snapshots[0].FileName, "test.txt")
|
||||
assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
|
||||
assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))
|
||||
|
||||
assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
|
||||
assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
|
||||
assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())
|
||||
assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
|
||||
assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
|
||||
assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())
|
||||
|
||||
var (
|
||||
found bool
|
||||
text string
|
||||
)
|
||||
var (
|
||||
found bool
|
||||
text string
|
||||
)
|
||||
|
||||
for _, block := range sn.Snapshots[0].Snapshot.Data.GetBlocks() {
|
||||
if t, ok := block.Content.(*model.BlockContentOfText); ok {
|
||||
found = ok
|
||||
text = t.Text.GetText()
|
||||
for _, block := range sn.Snapshots[0].Snapshot.Data.GetBlocks() {
|
||||
if t, ok := block.Content.(*model.BlockContentOfText); ok {
|
||||
found = ok
|
||||
text = t.Text.GetText()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert.Equal(t, text, "test")
|
||||
assert.True(t, found)
|
||||
assert.Equal(t, text, "test")
|
||||
assert.True(t, found)
|
||||
})
|
||||
t.Run("no objects in dir", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
h := &TXT{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
|
||||
TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{dir}},
|
||||
},
|
||||
Type: 4,
|
||||
Mode: 1,
|
||||
}, p)
|
||||
// then
|
||||
assert.NotNil(t, err)
|
||||
assert.True(t, errors.Is(err.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInDirectory))
|
||||
})
|
||||
t.Run("no objects in archive", func(t *testing.T) {
|
||||
// given
|
||||
dir := t.TempDir()
|
||||
zipPath := filepath.Join(dir, "empty.zip")
|
||||
err := test.CreateEmptyZip(t, zipPath)
|
||||
assert.Nil(t, err)
|
||||
|
||||
h := &TXT{}
|
||||
p := process.NewProgress(&pb.ModelProcessMessageOfImport{Import: &pb.ModelProcessImport{}})
|
||||
|
||||
// when
|
||||
_, ce := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
|
||||
Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
|
||||
TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{zipPath}},
|
||||
},
|
||||
Type: 4,
|
||||
Mode: 1,
|
||||
}, p)
|
||||
// then
|
||||
assert.NotNil(t, ce)
|
||||
assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInZipArchive))
|
||||
})
|
||||
}
|
||||
|
|
|
@@ -35,7 +35,7 @@ type Importer interface {
 Import(ctx context.Context, importRequest *ImportRequest) *ImportResponse
 
 ListImports(req *pb.RpcObjectImportListRequest) ([]*pb.RpcObjectImportListImportResponse, error)
-ImportWeb(ctx context.Context, req *pb.RpcObjectImportRequest) (string, *types.Struct, error)
+ImportWeb(ctx context.Context, req *ImportRequest) (string, *types.Struct, error)
 // nolint: lll
 ValidateNotionToken(ctx context.Context, req *pb.RpcObjectImportNotionValidateTokenRequest) (pb.RpcObjectImportNotionValidateTokenResponseErrorCode, error)
 }
@ -168,7 +168,7 @@ func (_c *MockCache_CreateTreeObject_Call) RunAndReturn(run func(context.Context
|
|||
}
|
||||
|
||||
// CreateTreeObjectWithPayload provides a mock function with given fields: ctx, payload, initFunc
|
||||
func (_m *MockCache) CreateTreeObjectWithPayload(ctx context.Context, payload treestorage.TreeStorageCreatePayload, initFunc func(string) *smartblock.InitContext) (smartblock.SmartBlock, error) {
|
||||
func (_m *MockCache) CreateTreeObjectWithPayload(ctx context.Context, payload treestorage.TreeStorageCreatePayload, initFunc objectcache.InitFunc) (smartblock.SmartBlock, error) {
|
||||
ret := _m.Called(ctx, payload, initFunc)
|
||||
|
||||
if len(ret) == 0 {
|
||||
|
@ -177,10 +177,10 @@ func (_m *MockCache) CreateTreeObjectWithPayload(ctx context.Context, payload tr
|
|||
|
||||
var r0 smartblock.SmartBlock
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, treestorage.TreeStorageCreatePayload, func(string) *smartblock.InitContext) (smartblock.SmartBlock, error)); ok {
|
||||
if rf, ok := ret.Get(0).(func(context.Context, treestorage.TreeStorageCreatePayload, objectcache.InitFunc) (smartblock.SmartBlock, error)); ok {
|
||||
return rf(ctx, payload, initFunc)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, treestorage.TreeStorageCreatePayload, func(string) *smartblock.InitContext) smartblock.SmartBlock); ok {
|
||||
if rf, ok := ret.Get(0).(func(context.Context, treestorage.TreeStorageCreatePayload, objectcache.InitFunc) smartblock.SmartBlock); ok {
|
||||
r0 = rf(ctx, payload, initFunc)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
|
@ -188,7 +188,7 @@ func (_m *MockCache) CreateTreeObjectWithPayload(ctx context.Context, payload tr
|
|||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, treestorage.TreeStorageCreatePayload, func(string) *smartblock.InitContext) error); ok {
|
||||
if rf, ok := ret.Get(1).(func(context.Context, treestorage.TreeStorageCreatePayload, objectcache.InitFunc) error); ok {
|
||||
r1 = rf(ctx, payload, initFunc)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
|
@ -205,14 +205,14 @@ type MockCache_CreateTreeObjectWithPayload_Call struct {
|
|||
// CreateTreeObjectWithPayload is a helper method to define mock.On call
|
||||
// - ctx context.Context
|
||||
// - payload treestorage.TreeStorageCreatePayload
|
||||
// - initFunc func(string) *smartblock.InitContext
|
||||
// - initFunc objectcache.InitFunc
|
||||
func (_e *MockCache_Expecter) CreateTreeObjectWithPayload(ctx interface{}, payload interface{}, initFunc interface{}) *MockCache_CreateTreeObjectWithPayload_Call {
|
||||
return &MockCache_CreateTreeObjectWithPayload_Call{Call: _e.mock.On("CreateTreeObjectWithPayload", ctx, payload, initFunc)}
|
||||
}
|
||||
|
||||
func (_c *MockCache_CreateTreeObjectWithPayload_Call) Run(run func(ctx context.Context, payload treestorage.TreeStorageCreatePayload, initFunc func(string) *smartblock.InitContext)) *MockCache_CreateTreeObjectWithPayload_Call {
|
||||
func (_c *MockCache_CreateTreeObjectWithPayload_Call) Run(run func(ctx context.Context, payload treestorage.TreeStorageCreatePayload, initFunc objectcache.InitFunc)) *MockCache_CreateTreeObjectWithPayload_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(context.Context), args[1].(treestorage.TreeStorageCreatePayload), args[2].(func(string) *smartblock.InitContext))
|
||||
run(args[0].(context.Context), args[1].(treestorage.TreeStorageCreatePayload), args[2].(objectcache.InitFunc))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
@ -222,7 +222,7 @@ func (_c *MockCache_CreateTreeObjectWithPayload_Call) Return(sb smartblock.Smart
|
|||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockCache_CreateTreeObjectWithPayload_Call) RunAndReturn(run func(context.Context, treestorage.TreeStorageCreatePayload, func(string) *smartblock.InitContext) (smartblock.SmartBlock, error)) *MockCache_CreateTreeObjectWithPayload_Call {
|
||||
func (_c *MockCache_CreateTreeObjectWithPayload_Call) RunAndReturn(run func(context.Context, treestorage.TreeStorageCreatePayload, objectcache.InitFunc) (smartblock.SmartBlock, error)) *MockCache_CreateTreeObjectWithPayload_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
|
|
@ -3,12 +3,10 @@ package objectcreator
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/gogo/protobuf/types"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/block/object/payloadcreator"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
|
@ -17,36 +15,11 @@ import (
|
|||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
func (s *service) createChat(ctx context.Context, space clientspace.Space, details *types.Struct) (string, *types.Struct, error) {
|
||||
payload, err := space.CreateTreePayload(ctx, payloadcreator.PayloadCreationParams{
|
||||
Time: time.Now(),
|
||||
SmartblockType: smartblock.SmartBlockTypeChatObject,
|
||||
})
|
||||
if err != nil {
|
||||
return "", nil, fmt.Errorf("create tree payload: %w", err)
|
||||
}
|
||||
|
||||
createState := state.NewDoc(payload.RootRawChange.Id, nil).(*state.State)
|
||||
details.Fields[bundle.RelationKeyLayout.String()] = pbtypes.Int64(int64(model.ObjectType_chat))
|
||||
createState.SetDetails(details)
|
||||
err = s.addChatDerivedObject(ctx, space, createState, payload.RootRawChange.Id)
|
||||
if err != nil {
|
||||
return "", nil, fmt.Errorf("add chat derived object: %w", err)
|
||||
}
|
||||
|
||||
id, newDetails, err := s.CreateSmartBlockFromStateInSpaceWithOptions(ctx, space, []domain.TypeKey{bundle.TypeKeyChat}, createState, WithPayload(&payload))
|
||||
if err != nil {
|
||||
return "", nil, fmt.Errorf("create smartblock from state: %w", err)
|
||||
}
|
||||
|
||||
return id, newDetails, nil
|
||||
}
|
||||
|
||||
func (s *service) addChatDerivedObject(ctx context.Context, space clientspace.Space, st *state.State, chatObjectId string) error {
|
||||
func (s *service) AddChatDerivedObject(ctx context.Context, space clientspace.Space, chatObjectId string) (chatId string, err error) {
|
||||
chatDetails := &types.Struct{Fields: map[string]*types.Value{}}
|
||||
chatUniqueKey, err := domain.NewUniqueKey(smartblock.SmartBlockTypeChatDerivedObject, chatObjectId)
|
||||
if err != nil {
|
||||
return fmt.Errorf("create payload: %w", err)
|
||||
return "", fmt.Errorf("create payload: %w", err)
|
||||
}
|
||||
chatDetails.Fields[bundle.RelationKeyUniqueKey.String()] = pbtypes.String(chatUniqueKey.Marshal())
|
||||
|
||||
|
@ -55,14 +28,12 @@ func (s *service) addChatDerivedObject(ctx context.Context, space clientspace.Sp
|
|||
Details: chatDetails,
|
||||
}
|
||||
|
||||
chatId, _, err := s.createObjectInSpace(ctx, space, chatReq)
|
||||
chatId, _, err = s.createObjectInSpace(ctx, space, chatReq)
|
||||
if err != nil {
|
||||
return fmt.Errorf("create object: %w", err)
|
||||
return "", fmt.Errorf("create object: %w", err)
|
||||
}
|
||||
|
||||
st.SetDetailAndBundledRelation(bundle.RelationKeyChatId, pbtypes.String(chatId))
|
||||
st.SetDetailAndBundledRelation(bundle.RelationKeyHasChat, pbtypes.Bool(true))
|
||||
return nil
|
||||
return chatId, nil
|
||||
}
|
||||
|
||||
func (s *service) createChatDerived(ctx context.Context, space clientspace.Space, details *types.Struct) (string, *types.Struct, error) {
|
||||
|
|
|
@ -11,6 +11,7 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/block/editor/lastused"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/block/restriction"
|
||||
"github.com/anyproto/anytype-heart/core/block/source"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
|
@ -20,6 +21,7 @@ import (
|
|||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
"github.com/anyproto/anytype-heart/space"
|
||||
"github.com/anyproto/anytype-heart/space/clientspace"
|
||||
"github.com/anyproto/anytype-heart/util/dateutil"
|
||||
"github.com/anyproto/anytype-heart/util/internalflag"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
@ -49,6 +51,7 @@ type Service interface {
|
|||
|
||||
CreateSmartBlockFromState(ctx context.Context, spaceID string, objectTypeKeys []domain.TypeKey, createState *state.State) (id string, newDetails *types.Struct, err error)
|
||||
CreateSmartBlockFromStateInSpace(ctx context.Context, space clientspace.Space, objectTypeKeys []domain.TypeKey, createState *state.State) (id string, newDetails *types.Struct, err error)
|
||||
AddChatDerivedObject(ctx context.Context, space clientspace.Space, chatObjectId string) (chatId string, err error)
|
||||
|
||||
InstallBundledObjects(ctx context.Context, space clientspace.Space, sourceObjectIds []string, isNewSpace bool) (ids []string, objects []*types.Struct, err error)
|
||||
app.Component
|
||||
|
@ -149,12 +152,16 @@ func (s *service) createObjectInSpace(
|
|||
return s.createRelation(ctx, space, details)
|
||||
case bundle.TypeKeyRelationOption:
|
||||
return s.createRelationOption(ctx, space, details)
|
||||
case bundle.TypeKeyChat:
|
||||
return s.createChat(ctx, space, details)
|
||||
case bundle.TypeKeyChatDerived:
|
||||
return s.createChatDerived(ctx, space, details)
|
||||
case bundle.TypeKeyFile:
|
||||
return "", nil, fmt.Errorf("files must be created via fileobject service")
|
||||
case bundle.TypeKeyTemplate:
|
||||
if pbtypes.GetString(details, bundle.RelationKeyTargetObjectType.String()) == "" {
|
||||
return "", nil, fmt.Errorf("cannot create template without target object")
|
||||
}
|
||||
case bundle.TypeKeyDate:
|
||||
return buildDateObject(space, details)
|
||||
}
|
||||
|
||||
return s.createObjectFromTemplate(ctx, space, []domain.TypeKey{req.ObjectTypeKey}, details, req.TemplateId)
|
||||
|
@@ -173,3 +180,33 @@ func (s *service) createObjectFromTemplate(
}
return s.CreateSmartBlockFromStateInSpace(ctx, space, objectTypeKeys, createState)
}

// buildDateObject does not create real date object. It just builds date object details
func buildDateObject(space clientspace.Space, details *types.Struct) (string, *types.Struct, error) {
name := pbtypes.GetString(details, bundle.RelationKeyName.String())
id, err := dateutil.DateNameToId(name)
if err != nil {
return "", nil, fmt.Errorf("failed to build date object, as its name is invalid: %w", err)
}

typeId, err := space.GetTypeIdByKey(context.Background(), bundle.TypeKeyDate)
if err != nil {
return "", nil, fmt.Errorf("failed to find Date type to build Date object: %w", err)
}

dateSource := source.NewDate(source.DateSourceParams{
Id: domain.FullID{
ObjectID: id,
SpaceID: space.Id(),
},
DateObjectTypeId: typeId,
})

detailsGetter, ok := dateSource.(source.SourceIdEndodedDetails)
if !ok {
return "", nil, fmt.Errorf("date object does not implement DetailsFromId")
}

details, err = detailsGetter.DetailsFromId()
return id, details, err
}
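buildDateObject persists nothing: the id is derived from the requested name, and the details are re-derived from that id on demand. A small sketch of that name-to-id round trip, assuming the dateutil helpers referenced in this commit (TimeToDateName, DateNameToId, TimeToDateId) keep the signatures used here:

    // Sketch only: the virtual Date object round trip used by buildDateObject.
    // The exact id format produced by dateutil is an internal detail.
    package main

    import (
        "fmt"
        "time"

        "github.com/anyproto/anytype-heart/util/dateutil"
    )

    func main() {
        now := time.Now()
        name := dateutil.TimeToDateName(now) // human-readable name, as used in the creator test below
        id, err := dateutil.DateNameToId(name)
        if err != nil {
            panic(err)
        }
        // Both forms identify the same virtual Date object for this day.
        fmt.Println(id, dateutil.TimeToDateId(now))
    }
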

156
core/block/object/objectcreator/creator_test.go
Normal file

@@ -0,0 +1,156 @@
|
|||
package objectcreator
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gogo/protobuf/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/lastused/mock_lastused"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/smartblock/smarttest"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/space/clientspace"
|
||||
"github.com/anyproto/anytype-heart/space/clientspace/mock_clientspace"
|
||||
"github.com/anyproto/anytype-heart/space/mock_space"
|
||||
"github.com/anyproto/anytype-heart/util/dateutil"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
const spaceId = "spc1"
|
||||
|
||||
type fixture struct {
|
||||
spaceService *mock_space.MockService
|
||||
spc *mock_clientspace.MockSpace
|
||||
templateService *testTemplateService
|
||||
lastUsedService *mock_lastused.MockObjectUsageUpdater
|
||||
service Service
|
||||
}
|
||||
|
||||
func newFixture(t *testing.T) *fixture {
|
||||
spaceService := mock_space.NewMockService(t)
|
||||
spc := mock_clientspace.NewMockSpace(t)
|
||||
|
||||
templateSvc := &testTemplateService{}
|
||||
lastUsedSvc := mock_lastused.NewMockObjectUsageUpdater(t)
|
||||
|
||||
s := &service{
|
||||
spaceService: spaceService,
|
||||
templateService: templateSvc,
|
||||
lastUsedUpdater: lastUsedSvc,
|
||||
}
|
||||
|
||||
return &fixture{
|
||||
spaceService: spaceService,
|
||||
spc: spc,
|
||||
templateService: templateSvc,
|
||||
lastUsedService: lastUsedSvc,
|
||||
service: s,
|
||||
}
|
||||
}
|
||||
|
||||
type testTemplateService struct {
|
||||
templates map[string]*state.State
|
||||
}
|
||||
|
||||
func (tts *testTemplateService) CreateTemplateStateWithDetails(templateId string, details *types.Struct) (*state.State, error) {
|
||||
if tts.templates != nil {
|
||||
if st, found := tts.templates[templateId]; found {
|
||||
return st, nil
|
||||
}
|
||||
}
|
||||
return state.NewDoc(templateId, nil).NewState(), nil
|
||||
}
|
||||
|
||||
func (tts *testTemplateService) TemplateCloneInSpace(space clientspace.Space, id string) (templateId string, err error) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func TestService_CreateObject(t *testing.T) {
|
||||
t.Run("template creation", func(t *testing.T) {
|
||||
// given
|
||||
sb := smarttest.New("test")
|
||||
f := newFixture(t)
|
||||
f.spaceService.EXPECT().Get(mock.Anything, mock.Anything).Return(f.spc, nil)
|
||||
f.spc.EXPECT().CreateTreeObject(mock.Anything, mock.Anything).Return(sb, nil)
|
||||
f.spc.EXPECT().Id().Return(spaceId)
|
||||
f.lastUsedService.EXPECT().UpdateLastUsedDate(spaceId, bundle.TypeKeyTemplate, mock.Anything).Return()
|
||||
|
||||
// when
|
||||
id, _, err := f.service.CreateObject(context.Background(), spaceId, CreateObjectRequest{
|
||||
Details: &types.Struct{Fields: map[string]*types.Value{
|
||||
bundle.RelationKeyTargetObjectType.String(): pbtypes.String(bundle.TypeKeyTask.URL()),
|
||||
}},
|
||||
ObjectTypeKey: bundle.TypeKeyTemplate,
|
||||
})
|
||||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "test", id)
|
||||
})
|
||||
|
||||
t.Run("template creation - no target type", func(t *testing.T) {
|
||||
// given
|
||||
f := newFixture(t)
|
||||
f.spaceService.EXPECT().Get(mock.Anything, mock.Anything).Return(f.spc, nil)
|
||||
|
||||
// when
|
||||
_, _, err := f.service.CreateObject(context.Background(), spaceId, CreateObjectRequest{
|
||||
ObjectTypeKey: bundle.TypeKeyTemplate,
|
||||
})
|
||||
|
||||
// then
|
||||
assert.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("date object creation", func(t *testing.T) {
|
||||
// given
|
||||
f := newFixture(t)
|
||||
f.spaceService.EXPECT().Get(mock.Anything, mock.Anything).Return(f.spc, nil)
|
||||
f.spc.EXPECT().Id().Return(spaceId)
|
||||
f.spc.EXPECT().GetTypeIdByKey(mock.Anything, mock.Anything).RunAndReturn(func(ctx context.Context, key domain.TypeKey) (string, error) {
|
||||
assert.Equal(t, bundle.TypeKeyDate, key)
|
||||
return bundle.TypeKeyDate.URL(), nil
|
||||
})
|
||||
ts := time.Now()
|
||||
name := dateutil.TimeToDateName(ts)
|
||||
|
||||
// when
|
||||
id, details, err := f.service.CreateObject(context.Background(), spaceId, CreateObjectRequest{
|
||||
ObjectTypeKey: bundle.TypeKeyDate,
|
||||
Details: &types.Struct{Fields: map[string]*types.Value{
|
||||
bundle.RelationKeyName.String(): pbtypes.String(name),
|
||||
}},
|
||||
})
|
||||
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, strings.HasPrefix(id, dateutil.TimeToShortDateId(ts)))
|
||||
assert.Equal(t, spaceId, pbtypes.GetString(details, bundle.RelationKeySpaceId.String()))
|
||||
assert.Equal(t, bundle.TypeKeyDate.URL(), pbtypes.GetString(details, bundle.RelationKeyType.String()))
|
||||
})
|
||||
|
||||
t.Run("date object creation - invalid name", func(t *testing.T) {
|
||||
// given
|
||||
f := newFixture(t)
|
||||
f.spaceService.EXPECT().Get(mock.Anything, mock.Anything).Return(f.spc, nil)
|
||||
ts := time.Now()
|
||||
name := ts.Format(time.RFC3339)
|
||||
|
||||
// when
|
||||
_, _, err := f.service.CreateObject(context.Background(), spaceId, CreateObjectRequest{
|
||||
ObjectTypeKey: bundle.TypeKeyDate,
|
||||
Details: &types.Struct{Fields: map[string]*types.Value{
|
||||
bundle.RelationKeyName.String(): pbtypes.String(name),
|
||||
}},
|
||||
})
|
||||
|
||||
// then
|
||||
assert.Error(t, err)
|
||||
})
|
||||
}
|
|
@ -32,6 +32,64 @@ func (_m *MockService) EXPECT() *MockService_Expecter {
|
|||
return &MockService_Expecter{mock: &_m.Mock}
|
||||
}
|
||||
|
||||
// AddChatDerivedObject provides a mock function with given fields: ctx, space, chatObjectId
|
||||
func (_m *MockService) AddChatDerivedObject(ctx context.Context, space clientspace.Space, chatObjectId string) (string, error) {
|
||||
ret := _m.Called(ctx, space, chatObjectId)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for AddChatDerivedObject")
|
||||
}
|
||||
|
||||
var r0 string
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, clientspace.Space, string) (string, error)); ok {
|
||||
return rf(ctx, space, chatObjectId)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, clientspace.Space, string) string); ok {
|
||||
r0 = rf(ctx, space, chatObjectId)
|
||||
} else {
|
||||
r0 = ret.Get(0).(string)
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, clientspace.Space, string) error); ok {
|
||||
r1 = rf(ctx, space, chatObjectId)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// MockService_AddChatDerivedObject_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddChatDerivedObject'
|
||||
type MockService_AddChatDerivedObject_Call struct {
|
||||
*mock.Call
|
||||
}
|
||||
|
||||
// AddChatDerivedObject is a helper method to define mock.On call
|
||||
// - ctx context.Context
|
||||
// - space clientspace.Space
|
||||
// - chatObjectId string
|
||||
func (_e *MockService_Expecter) AddChatDerivedObject(ctx interface{}, space interface{}, chatObjectId interface{}) *MockService_AddChatDerivedObject_Call {
|
||||
return &MockService_AddChatDerivedObject_Call{Call: _e.mock.On("AddChatDerivedObject", ctx, space, chatObjectId)}
|
||||
}
|
||||
|
||||
func (_c *MockService_AddChatDerivedObject_Call) Run(run func(ctx context.Context, space clientspace.Space, chatObjectId string)) *MockService_AddChatDerivedObject_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(context.Context), args[1].(clientspace.Space), args[2].(string))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockService_AddChatDerivedObject_Call) Return(chatId string, err error) *MockService_AddChatDerivedObject_Call {
|
||||
_c.Call.Return(chatId, err)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockService_AddChatDerivedObject_Call) RunAndReturn(run func(context.Context, clientspace.Space, string) (string, error)) *MockService_AddChatDerivedObject_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
||||
// CreateObject provides a mock function with given fields: ctx, spaceID, req
|
||||
func (_m *MockService) CreateObject(ctx context.Context, spaceID string, req objectcreator.CreateObjectRequest) (string, *types.Struct, error) {
|
||||
ret := _m.Called(ctx, spaceID, req)
|
||||
|
|
|
@@ -13,9 +13,9 @@ import (
"github.com/anyproto/anytype-heart/core/block/simple"
"github.com/anyproto/anytype-heart/core/domain"
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
"github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
"github.com/anyproto/anytype-heart/pkg/lib/logging"
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
"github.com/anyproto/anytype-heart/util/dateutil"
"github.com/anyproto/anytype-heart/util/pbtypes"
)

@@ -152,7 +152,7 @@ func collectIdsFromDetail(rel *model.RelationLink, det *types.Struct, flags Flag
if relInt > 0 {
t := time.Unix(relInt, 0)
t = t.In(time.Local)
ids = append(ids, addr.TimeToID(t))
ids = append(ids, dateutil.TimeToDateId(t))
}
return
}

@@ -162,10 +162,9 @@ func (t *treeSyncer) SyncAll(ctx context.Context, p peer.Peer, existing, missing
defer t.Unlock()
peerId := p.Id()
isResponsible := slices.Contains(t.nodeConf.NodeIds(t.spaceId), peerId)
existing = lo.Filter(existing, func(id string, index int) bool {
t.sendSyncEvents(lo.Filter(existing, func(id string, index int) bool {
return id != t.spaceSettingsId
})
t.sendSyncEvents(existing, missing, isResponsible)
}), missing, isResponsible)
reqExec, exists := t.requestPools[peerId]
if !exists {
reqExec = newExecutor(t.requests, 0)

@ -12,6 +12,7 @@ import (
|
|||
"github.com/anyproto/any-sync/commonspace/object/tree/synctree/mock_synctree"
|
||||
"github.com/anyproto/any-sync/commonspace/object/treemanager/mock_treemanager"
|
||||
"github.com/anyproto/any-sync/commonspace/spacestorage/mock_spacestorage"
|
||||
"github.com/anyproto/any-sync/net/peer"
|
||||
"github.com/anyproto/any-sync/net/rpc/rpctest"
|
||||
"github.com/anyproto/any-sync/nodeconf/mock_nodeconf"
|
||||
"github.com/stretchr/testify/mock"
|
||||
|
@ -155,10 +156,17 @@ func TestTreeSyncer(t *testing.T) {
|
|||
fx.syncStatus.EXPECT().RemoveAllExcept(peerId, mock.Anything).RunAndReturn(func(s string, strings []string) {
|
||||
require.Empty(t, strings)
|
||||
})
|
||||
ch := make(chan struct{})
|
||||
fx.treeManager.EXPECT().GetTree(gomock.Any(), spaceId, "spaceSettingsId").Return(fx.existingMock, nil)
|
||||
fx.existingMock.EXPECT().SyncWithPeer(gomock.Any(), pr).DoAndReturn(func(ctx context.Context, peer peer.Peer) error {
|
||||
close(ch)
|
||||
return nil
|
||||
})
|
||||
|
||||
fx.StartSync()
|
||||
err := fx.SyncAll(context.Background(), pr, []string{"spaceSettingsId"}, nil)
|
||||
require.NoError(t, err)
|
||||
<-ch
|
||||
fx.Close(ctx)
|
||||
})
|
||||
|
||||
|
|
|
@@ -29,12 +29,12 @@ type notificationProcess struct {
notificationService NotificationService
}

func NewNotificationProcess(pbType pb.ModelProcessType, notificationService NotificationService) Notificationable {
func NewNotificationProcess(processMessage pb.IsModelProcessMessage, notificationService NotificationService) Notificationable {
return &notificationProcess{progress: &progress{
id: bson.NewObjectId().Hex(),
done: make(chan struct{}),
cancel: make(chan struct{}),
pType: pbType,
id: bson.NewObjectId().Hex(),
done: make(chan struct{}),
cancel: make(chan struct{}),
processMessage: processMessage,
}, notificationService: notificationService}
}

@ -22,12 +22,12 @@ type Progress interface {
|
|||
TryStep(delta int64) error
|
||||
}
|
||||
|
||||
func NewProgress(pType pb.ModelProcessType) Progress {
|
||||
func NewProgress(processMessage pb.IsModelProcessMessage) Progress {
|
||||
return &progress{
|
||||
id: bson.NewObjectId().Hex(),
|
||||
done: make(chan struct{}),
|
||||
cancel: make(chan struct{}),
|
||||
pType: pType,
|
||||
id: bson.NewObjectId().Hex(),
|
||||
done: make(chan struct{}),
|
||||
cancel: make(chan struct{}),
|
||||
processMessage: processMessage,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -36,13 +36,15 @@ type progress struct {
|
|||
done, cancel chan struct{}
|
||||
totalCount, doneCount int64
|
||||
|
||||
pType pb.ModelProcessType
|
||||
pMessage string
|
||||
m sync.Mutex
|
||||
processMessage pb.IsModelProcessMessage
|
||||
pMessage string
|
||||
m sync.Mutex
|
||||
|
||||
isCancelled bool
|
||||
isDone bool
|
||||
isFinishedWithError bool
|
||||
|
||||
err error
|
||||
}
|
||||
|
||||
func (p *progress) SetTotal(total int64) {
|
||||
|
@ -88,6 +90,7 @@ func (p *progress) Finish(err error) {
|
|||
}
|
||||
if err != nil {
|
||||
p.isFinishedWithError = true
|
||||
p.err = err
|
||||
}
|
||||
close(p.done)
|
||||
p.isDone = true
|
||||
|
@@ -111,12 +114,17 @@ func (p *progress) Cancel() (err error) {

func (p *progress) Info() pb.ModelProcess {
state := pb.ModelProcess_Running
var errDescription string
select {
case <-p.done:
state = pb.ModelProcess_Done
if p.isFinishedWithError {
errDescription = p.err.Error()
state = pb.ModelProcess_Error
} else {
p.SetDone(atomic.LoadInt64(&p.totalCount))
}
return p.makeInfo(state, errDescription)
default:
}
select {

@@ -124,17 +132,22 @@ func (p *progress) Info() pb.ModelProcess {
state = pb.ModelProcess_Canceled
default:
}
return p.makeInfo(state, errDescription)
}

func (p *progress) makeInfo(state pb.ModelProcessState, errDescription string) pb.ModelProcess {
p.m.Lock()
defer p.m.Unlock()
return pb.ModelProcess{
Id: p.id,
Type: p.pType,
State: state,
Progress: &pb.ModelProcessProgress{
Total: atomic.LoadInt64(&p.totalCount),
Done: atomic.LoadInt64(&p.doneCount),
Message: p.pMessage,
},
Error: errDescription,
Message: p.processMessage,
}
}

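With the error captured in Finish and surfaced by makeInfo, callers can now tell Done apart from Error when polling. A minimal sketch, written as if inside the process package; passing nil for the pb.IsModelProcessMessage argument is an assumption used only to keep the example short:

    // Sketch only: drive a progress to completion and read its terminal state.
    p := NewProgress(nil) // nil: placeholder for a concrete pb.IsModelProcessMessage value
    p.SetTotal(3)
    _ = p.TryStep(3)      // TryStep is assumed to advance the counter and fail once cancelled
    p.Finish(nil)         // nil error -> Done; non-nil -> Error, with err.Error() in Info().Error
    info := p.Info()
    fmt.Println(info.State, info.Progress.Done, info.Error)
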
@ -180,13 +180,13 @@ func (p *queue) Info() pb.ModelProcess {
|
|||
defer p.m.Unlock()
|
||||
return pb.ModelProcess{
|
||||
Id: p.id,
|
||||
Type: p.info.Type,
|
||||
State: p.state,
|
||||
Progress: &pb.ModelProcessProgress{
|
||||
Total: atomic.LoadInt64(&p.pTotal),
|
||||
Done: atomic.LoadInt64(&p.pDone),
|
||||
Message: p.message,
|
||||
},
|
||||
Message: p.info.Message,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,11 +34,15 @@ type Service interface {
|
|||
Cancel(id string) (err error)
|
||||
// NewQueue creates new queue with given workers count
|
||||
NewQueue(info pb.ModelProcess, workers int) Queue
|
||||
// Subscribe remove session from the map of disabled sessions
|
||||
Subscribe(token string)
|
||||
// Unsubscribe add session to the map of disabled sessions
|
||||
Unsubscribe(token string)
|
||||
app.ComponentRunnable
|
||||
}
|
||||
|
||||
func New() Service {
|
||||
return &service{}
|
||||
return &service{disabledProcessEvent: make(map[string]struct{})}
|
||||
}
|
||||
|
||||
type service struct {
|
||||
|
@ -46,6 +50,9 @@ type service struct {
|
|||
eventSender event.Sender
|
||||
waiters map[string]chan struct{}
|
||||
m sync.Mutex
|
||||
|
||||
disabledProcessEvent map[string]struct{}
|
||||
sessionMu sync.Mutex
|
||||
}
|
||||
|
||||
func (s *service) Init(a *app.App) (err error) {
|
||||
|
@ -91,7 +98,7 @@ func (s *service) monitor(p Process) {
|
|||
case <-ticker.C:
|
||||
info := p.Info()
|
||||
if !infoEquals(info, prevInfo) {
|
||||
s.eventSender.Broadcast(&pb.Event{
|
||||
s.eventSender.BroadcastExceptSessions(&pb.Event{
|
||||
Messages: []*pb.EventMessage{
|
||||
{
|
||||
Value: &pb.EventMessageValueOfProcessUpdate{
|
||||
|
@ -101,7 +108,7 @@ func (s *service) monitor(p Process) {
|
|||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}, s.getExcludedSessions())
|
||||
prevInfo = info
|
||||
}
|
||||
case <-p.Done():
|
||||
|
@@ -172,6 +179,28 @@ func (s *service) Close(ctx context.Context) (err error) {
return nil
}

func (s *service) Subscribe(token string) {
s.sessionMu.Lock()
defer s.sessionMu.Unlock()
delete(s.disabledProcessEvent, token)
}

func (s *service) Unsubscribe(token string) {
s.sessionMu.Lock()
defer s.sessionMu.Unlock()
s.disabledProcessEvent[token] = struct{}{}
}

func (s *service) getExcludedSessions() []string {
s.sessionMu.Lock()
defer s.sessionMu.Unlock()
tokens := make([]string, 0, len(s.disabledProcessEvent))
for token := range s.disabledProcessEvent {
tokens = append(tokens, token)
}
return tokens
}

func infoEquals(i1, i2 pb.ModelProcess) bool {
return reflect.DeepEqual(i1, i2)
}

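Together with the BroadcastExceptSessions call in monitor, these methods let a client session mute and later resume process events. A usage sketch, assuming svc is this process service and sessionToken identifies the client session; runLargeImport is a hypothetical placeholder:

    // Sketch only: a session opts out of ProcessUpdate events during a bulk
    // operation, then opts back in; the service keeps broadcasting to everyone else.
    svc.Unsubscribe(sessionToken) // token added to disabledProcessEvent, excluded from broadcasts
    runLargeImport()              // hypothetical long-running work emitting process updates
    svc.Subscribe(sessionToken)   // token removed, this session receives events again
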
@ -59,6 +59,47 @@ func TestService_Cancel(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestService_Subscribe(t *testing.T) {
|
||||
t.Run("remove non existing token from excluded sessions", func(t *testing.T) {
|
||||
// given
|
||||
s := New()
|
||||
|
||||
// when
|
||||
s.Subscribe("test")
|
||||
|
||||
// then
|
||||
service := s.(*service)
|
||||
assert.Empty(t, service.disabledProcessEvent["test"])
|
||||
})
|
||||
t.Run("remove existing token from excluded sessions", func(t *testing.T) {
|
||||
// given
|
||||
s := New()
|
||||
|
||||
// when
|
||||
service := s.(*service)
|
||||
service.disabledProcessEvent["test"] = struct{}{}
|
||||
s.Subscribe("test")
|
||||
|
||||
// then
|
||||
assert.Empty(t, service.disabledProcessEvent["test"])
|
||||
})
|
||||
}
|
||||
|
||||
func TestService_Unsubscribe(t *testing.T) {
|
||||
t.Run("add disabled session", func(t *testing.T) {
|
||||
// given
|
||||
s := New()
|
||||
|
||||
// when
|
||||
s.Unsubscribe("test")
|
||||
|
||||
// then
|
||||
service := s.(*service)
|
||||
_, ok := service.disabledProcessEvent["test"]
|
||||
assert.True(t, ok)
|
||||
})
|
||||
}
|
||||
|
||||
func newTestProcess(id string) *testProcess {
|
||||
return &testProcess{
|
||||
id: id,
|
||||
|
|
|
@ -24,7 +24,7 @@ var (
|
|||
model.Restrictions_Template,
|
||||
model.Restrictions_Duplicate,
|
||||
}
|
||||
objFileRestrictions = ObjectRestrictions{
|
||||
objRestrictEditAndDuplicate = ObjectRestrictions{
|
||||
model.Restrictions_Blocks,
|
||||
model.Restrictions_LayoutChange,
|
||||
model.Restrictions_TypeChange,
|
||||
|
@ -63,7 +63,7 @@ var (
|
|||
model.ObjectType_collection: objRestrictEdit,
|
||||
model.ObjectType_objectType: objRestrictEdit,
|
||||
model.ObjectType_relation: objRestrictEdit,
|
||||
model.ObjectType_file: objFileRestrictions,
|
||||
model.ObjectType_file: objRestrictEditAndDuplicate,
|
||||
model.ObjectType_dashboard: {
|
||||
model.Restrictions_Details,
|
||||
model.Restrictions_Relations,
|
||||
|
@ -85,8 +85,8 @@ var (
|
|||
model.Restrictions_Template,
|
||||
},
|
||||
model.ObjectType_participant: objRestrictAll,
|
||||
model.ObjectType_chat: objRestrictEdit,
|
||||
model.ObjectType_chatDerived: objRestrictEdit,
|
||||
model.ObjectType_chat: objRestrictEditAndDuplicate,
|
||||
model.ObjectType_chatDerived: objRestrictEditAndDuplicate,
|
||||
model.ObjectType_tag: objRestrictEdit,
|
||||
}
|
||||
|
||||
|
@ -117,7 +117,7 @@ var (
|
|||
model.Restrictions_Template,
|
||||
model.Restrictions_Duplicate,
|
||||
},
|
||||
smartblock.SmartBlockTypeFileObject: objFileRestrictions,
|
||||
smartblock.SmartBlockTypeFileObject: objRestrictEditAndDuplicate,
|
||||
smartblock.SmartBlockTypeArchive: objRestrictAll,
|
||||
smartblock.SmartBlockTypeBundledRelation: objRestrictAll,
|
||||
smartblock.SmartBlockTypeSubObject: objRestrictEdit,
|
||||
|
@ -143,7 +143,9 @@ var (
|
|||
smartblock.SmartBlockTypeAccountOld: {
|
||||
model.Restrictions_Template,
|
||||
},
|
||||
smartblock.SmartBlockTypeParticipant: objRestrictAll,
|
||||
smartblock.SmartBlockTypeParticipant: objRestrictAll,
|
||||
smartblock.SmartBlockTypeChatObject: objRestrictEditAndDuplicate,
|
||||
smartblock.SmartBlockTypeChatDerivedObject: objRestrictEditAndDuplicate,
|
||||
}
|
||||
)
|
||||
|
||||
|
|
|
@ -92,6 +92,12 @@ func TestService_ObjectRestrictionsById(t *testing.T) {
|
|||
bundledRelation := givenRestrictionHolder(coresb.SmartBlockTypeBundledRelation, bundle.TypeKeyRelation)
|
||||
assert.ErrorIs(t, rs.GetRestrictions(bundledRelation).Object.Check(objRestrictAll...), ErrRestricted)
|
||||
})
|
||||
|
||||
t.Run("chat should have edit and duplication restrictions", func(t *testing.T) {
|
||||
assert.ErrorIs(t, rs.GetRestrictions(givenRestrictionHolder(coresb.SmartBlockTypeChatObject, bundle.TypeKeyChat)).Object.Check(
|
||||
objRestrictEditAndDuplicate...,
|
||||
), ErrRestricted)
|
||||
})
|
||||
}
|
||||
|
||||
func TestTemplateRestriction(t *testing.T) {
|
||||
|
|
|
@ -40,6 +40,7 @@ import (
|
|||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/core"
|
||||
coresb "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/database"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/logging"
|
||||
|
@@ -302,6 +303,54 @@ func (s *Service) SpaceInstallBundledObjects(
return s.objectCreator.InstallBundledObjects(ctx, spc, sourceObjectIds, false)
}

func (s *Service) SpaceInitChat(ctx context.Context, spaceId string) error {
spc, err := s.spaceService.Get(ctx, spaceId)
if err != nil {
return fmt.Errorf("get space: %w", err)
}
if spc.IsReadOnly() {
return nil
}
if spc.IsPersonal() {
return nil
}

workspaceId := spc.DerivedIDs().Workspace
chatUk, err := domain.NewUniqueKey(coresb.SmartBlockTypeChatDerivedObject, workspaceId)
if err != nil {
return err
}

chatId, err := spc.DeriveObjectID(context.Background(), chatUk)
if err != nil {
return err
}

if spaceChatExists, err := spc.Storage().HasTree(chatId); err != nil {
return err
} else if !spaceChatExists {
_, err = s.objectCreator.AddChatDerivedObject(ctx, spc, workspaceId)
if err != nil {
if !errors.Is(err, treestorage.ErrTreeExists) {
return fmt.Errorf("add chat derived object: %w", err)
}
}
}

err = spc.DoCtx(ctx, workspaceId, func(b smartblock.SmartBlock) error {
st := b.NewState()
st.SetLocalDetail(bundle.RelationKeyChatId.String(), pbtypes.String(chatId))
st.SetDetail(bundle.RelationKeyHasChat.String(), pbtypes.Bool(true))

return b.Apply(st, smartblock.NoHistory, smartblock.NoEvent, smartblock.SkipIfNoChanges, smartblock.KeepInternalFlags, smartblock.IgnoreNoPermissions)
})
if err != nil {
return fmt.Errorf("apply chatId to workspace: %w", err)
}

return nil
}
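SpaceInitChat is written to be idempotent: the chat id is derived deterministically from the workspace id, creation is skipped when the tree already exists (ErrTreeExists is tolerated for races), and the workspace update applies with SkipIfNoChanges. A sketch of what that means for callers, assuming svc is this workspace Service:

    // Sketch only: repeated or concurrent initialization converges on the same
    // derived chat object without duplicating trees or rewriting the workspace.
    if err := svc.SpaceInitChat(ctx, spaceId); err != nil {
        return err
    }
    // A second call (another device, a retry) is effectively a no-op.
    if err := svc.SpaceInitChat(ctx, spaceId); err != nil {
        return err
    }
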

func (s *Service) SelectWorkspace(req *pb.RpcWorkspaceSelectRequest) error {
panic("should be removed")
}

|
@ -571,8 +620,12 @@ func (s *Service) pasteBlocks(id string, content *bookmark.ObjectContent) error
|
|||
}
|
||||
for _, r := range uploadArr {
|
||||
r.ContextId = id
|
||||
uploadReq := UploadRequest{RpcBlockUploadRequest: r, ObjectOrigin: objectorigin.Webclipper()}
|
||||
if err = s.UploadBlockFile(nil, uploadReq, groupID); err != nil {
|
||||
uploadReq := UploadRequest{
|
||||
RpcBlockUploadRequest: r,
|
||||
ObjectOrigin: objectorigin.Webclipper(),
|
||||
ImageKind: model.ImageKind_AutomaticallyAdded,
|
||||
}
|
||||
if _, err = s.UploadBlockFile(nil, uploadReq, groupID, false); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,187 +3,115 @@ package source
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gogo/protobuf/types"
|
||||
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/state"
|
||||
"github.com/anyproto/anytype-heart/core/block/editor/template"
|
||||
"github.com/anyproto/anytype-heart/core/domain"
|
||||
"github.com/anyproto/anytype-heart/pb"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
"github.com/anyproto/anytype-heart/util/dateutil"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
func NewDate(space Space, id string) (s Source) {
|
||||
type DateSourceParams struct {
|
||||
Id domain.FullID
|
||||
DateObjectTypeId string
|
||||
}
|
||||
|
||||
func NewDate(params DateSourceParams) (s Source) {
|
||||
return &date{
|
||||
id: id,
|
||||
space: space,
|
||||
id: params.Id.ObjectID,
|
||||
spaceId: params.Id.SpaceID,
|
||||
typeId: params.DateObjectTypeId,
|
||||
}
|
||||
}
|
||||
|
||||
type date struct {
|
||||
space Space
|
||||
id string
|
||||
t time.Time
|
||||
id, spaceId, typeId string
|
||||
}
|
||||
|
||||
func (v *date) ListIds() ([]string, error) {
|
||||
func (d *date) ListIds() ([]string, error) {
|
||||
return []string{}, nil
|
||||
}
|
||||
|
||||
func (v *date) ReadOnly() bool {
|
||||
func (d *date) ReadOnly() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (v *date) Id() string {
|
||||
return v.id
|
||||
func (d *date) Id() string {
|
||||
return d.id
|
||||
}
|
||||
|
||||
func (v *date) SpaceID() string {
|
||||
if v.space == nil {
|
||||
return ""
|
||||
}
|
||||
return v.space.Id()
|
||||
func (d *date) SpaceID() string {
|
||||
return d.spaceId
|
||||
}
|
||||
|
||||
func (v *date) Type() smartblock.SmartBlockType {
|
||||
func (d *date) Type() smartblock.SmartBlockType {
|
||||
return smartblock.SmartBlockTypeDate
|
||||
}
|
||||
|
||||
func (v *date) getDetails(ctx context.Context) (*types.Struct, error) {
|
||||
linksRelationId, err := v.space.GetRelationIdByKey(ctx, bundle.RelationKeyLinks)
|
||||
func (d *date) getDetails() (*types.Struct, error) {
|
||||
t, err := dateutil.ParseDateId(d.id)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get links relation id: %w", err)
|
||||
}
|
||||
dateTypeId, err := v.space.GetTypeIdByKey(ctx, bundle.TypeKeyDate)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get date type id: %w", err)
|
||||
return nil, fmt.Errorf("failed to parse date id: %w", err)
|
||||
}
|
||||
|
||||
return &types.Struct{Fields: map[string]*types.Value{
|
||||
bundle.RelationKeyName.String(): pbtypes.String(v.t.Format("Mon Jan 2 2006")),
|
||||
bundle.RelationKeyId.String(): pbtypes.String(v.id),
|
||||
bundle.RelationKeyName.String(): pbtypes.String(dateutil.TimeToDateName(t)),
|
||||
bundle.RelationKeyId.String(): pbtypes.String(d.id),
|
||||
bundle.RelationKeyType.String(): pbtypes.String(d.typeId),
|
||||
bundle.RelationKeyIsReadonly.String(): pbtypes.Bool(true),
|
||||
bundle.RelationKeyIsArchived.String(): pbtypes.Bool(false),
|
||||
bundle.RelationKeyIsHidden.String(): pbtypes.Bool(false),
|
||||
bundle.RelationKeyLayout.String(): pbtypes.Float64(float64(model.ObjectType_date)),
|
||||
bundle.RelationKeyIconEmoji.String(): pbtypes.String("📅"),
|
||||
bundle.RelationKeySpaceId.String(): pbtypes.String(v.SpaceID()),
|
||||
bundle.RelationKeySetOf.String(): pbtypes.StringList([]string{linksRelationId}),
|
||||
bundle.RelationKeyType.String(): pbtypes.String(dateTypeId),
|
||||
bundle.RelationKeySpaceId.String(): pbtypes.String(d.SpaceID()),
|
||||
bundle.RelationKeyTimestamp.String(): pbtypes.Int64(t.Unix()),
|
||||
}}, nil
|
||||
}
|
||||
|
||||
// TODO Fix?
|
||||
func (v *date) DetailsFromId() (*types.Struct, error) {
|
||||
if err := v.parseId(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &types.Struct{Fields: map[string]*types.Value{
|
||||
bundle.RelationKeyName.String(): pbtypes.String(v.t.Format("Mon Jan 2 2006")),
|
||||
bundle.RelationKeyId.String(): pbtypes.String(v.id),
|
||||
bundle.RelationKeyIsReadonly.String(): pbtypes.Bool(true),
|
||||
bundle.RelationKeyIsArchived.String(): pbtypes.Bool(false),
|
||||
bundle.RelationKeyIsHidden.String(): pbtypes.Bool(false),
|
||||
bundle.RelationKeyLayout.String(): pbtypes.Float64(float64(model.ObjectType_date)),
|
||||
bundle.RelationKeyIconEmoji.String(): pbtypes.String("📅"),
|
||||
bundle.RelationKeySpaceId.String(): pbtypes.String(v.SpaceID()),
|
||||
}}, nil
|
||||
func (d *date) DetailsFromId() (*types.Struct, error) {
|
||||
return d.getDetails()
|
||||
}
|
||||
|
||||
func (v *date) parseId() error {
|
||||
t, err := time.Parse("2006-01-02", strings.TrimPrefix(v.id, addr.DatePrefix))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.t = t
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *date) ReadDoc(ctx context.Context, receiver ChangeReceiver, empty bool) (doc state.Doc, err error) {
|
||||
if err = v.parseId(); err != nil {
|
||||
return
|
||||
}
|
||||
s := state.NewDoc(v.id, nil).(*state.State)
|
||||
d, err := v.getDetails(ctx)
|
||||
func (d *date) ReadDoc(context.Context, ChangeReceiver, bool) (doc state.Doc, err error) {
|
||||
details, err := d.getDetails()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
dataview := &model.BlockContentOfDataview{
|
||||
Dataview: &model.BlockContentDataview{
|
||||
RelationLinks: []*model.RelationLink{
|
||||
{
|
||||
Key: bundle.RelationKeyName.String(),
|
||||
Format: model.RelationFormat_shorttext,
|
||||
},
|
||||
{
|
||||
Key: bundle.RelationKeyLastModifiedDate.String(),
|
||||
Format: model.RelationFormat_date,
|
||||
},
|
||||
},
|
||||
Views: []*model.BlockContentDataviewView{
|
||||
{
|
||||
Id: "1",
|
||||
Type: model.BlockContentDataviewView_Table,
|
||||
Name: "Date backlinks",
|
||||
Sorts: []*model.BlockContentDataviewSort{
|
||||
{
|
||||
RelationKey: bundle.RelationKeyLastModifiedDate.String(),
|
||||
Type: model.BlockContentDataviewSort_Desc,
|
||||
},
|
||||
},
|
||||
Filters: []*model.BlockContentDataviewFilter{
|
||||
{
|
||||
RelationKey: bundle.RelationKeyLinks.String(),
|
||||
Condition: model.BlockContentDataviewFilter_In,
|
||||
Value: pbtypes.String(v.id),
|
||||
},
|
||||
},
|
||||
Relations: []*model.BlockContentDataviewRelation{
|
||||
{
|
||||
Key: bundle.RelationKeyName.String(),
|
||||
IsVisible: true,
|
||||
},
|
||||
{
|
||||
Key: bundle.RelationKeyLastModifiedDate.String(),
|
||||
IsVisible: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
s := state.NewDoc(d.id, nil).(*state.State)
|
||||
template.InitTemplate(s,
|
||||
template.WithTitle,
|
||||
template.WithDefaultFeaturedRelations,
|
||||
template.WithDataview(dataview, true),
|
||||
template.WithAllBlocksEditsRestricted,
|
||||
)
|
||||
s.SetDetails(d)
|
||||
s.SetDetails(details)
|
||||
s.SetObjectTypeKey(bundle.TypeKeyDate)
|
||||
return s, nil
|
||||
}
|
||||
|
||||
func (v *date) PushChange(params PushChangeParams) (id string, err error) {
|
||||
func (d *date) PushChange(PushChangeParams) (id string, err error) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (v *date) Close() (err error) {
|
||||
func (d *date) Close() (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
func (v *date) Heads() []string {
|
||||
return []string{v.id}
|
||||
func (d *date) Heads() []string {
|
||||
return []string{d.id}
|
||||
}
|
||||
|
||||
func (s *date) GetFileKeysSnapshot() []*pb.ChangeFileKeys {
|
||||
func (d *date) GetFileKeysSnapshot() []*pb.ChangeFileKeys {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *date) GetCreationInfo() (creatorObjectId string, createdDate int64, err error) {
|
||||
func (d *date) GetCreationInfo() (creatorObjectId string, createdDate int64, err error) {
|
||||
return addr.AnytypeProfileId, 0, nil
|
||||
}
|
||||
|
|
|
@ -19,127 +19,139 @@ var text = "-- Еh bien, mon prince. Gênes et Lucques ne sont plus que des apan
|
|||
|
||||
func TestMarshallChange(t *testing.T) {
|
||||
t.Run("marshall small change", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSmallTextUpdate()
|
||||
|
||||
//when
|
||||
// when
|
||||
data, dt, err := MarshalChange(c)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.NotZero(t, len(data))
|
||||
assert.Empty(t, dt)
|
||||
})
|
||||
|
||||
t.Run("marshall bigger change", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSetBigDetail(snappyLowerLimit)
|
||||
|
||||
//when
|
||||
// when
|
||||
data, dt, err := MarshalChange(c)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, data)
|
||||
assert.Equal(t, dataTypeSnappy, dt)
|
||||
})
|
||||
}
|
||||
|
||||
func TestNewUnmarshalTreeChange(t *testing.T) {
|
||||
ch1, _, _ := MarshalChange(changeWithBigSnapshot())
|
||||
ch2, _, _ := MarshalChange(changeWithBigSnapshot())
|
||||
unmarshalF := NewUnmarshalTreeChange()
|
||||
res1, err := unmarshalF(&objecttree.Change{DataType: dataTypeSnappy}, ch1)
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, res1.(*pb.Change).Snapshot)
|
||||
res2, err := unmarshalF(&objecttree.Change{DataType: dataTypeSnappy}, ch2)
|
||||
require.NoError(t, err)
|
||||
assert.Nil(t, res2.(*pb.Change).Snapshot)
|
||||
}
|
||||
|
||||
func TestUnmarshallChange(t *testing.T) {
|
||||
invalidDataType := "invalid"
|
||||
|
||||
t.Run("unmarshall small change", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSmallTextUpdate()
|
||||
data, dt, err := MarshalChange(c)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, data)
|
||||
require.Empty(t, dt)
|
||||
|
||||
//when
|
||||
// when
|
||||
res, err := UnmarshalChange(&objecttree.Change{DataType: dt}, data)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, c, res)
|
||||
})
|
||||
|
||||
t.Run("unmarshall bigger change", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSetBigDetail(snappyLowerLimit)
|
||||
data, dt, err := MarshalChange(c)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, data)
|
||||
require.Equal(t, dataTypeSnappy, dt)
|
||||
|
||||
//when
|
||||
// when
|
||||
res, err := UnmarshalChange(&objecttree.Change{DataType: dt}, data)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, c, res)
|
||||
})
|
||||
|
||||
t.Run("unmarshall plain change with invalid data type", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSmallTextUpdate()
|
||||
data, dt, err := MarshalChange(c)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, data)
|
||||
require.Empty(t, dt)
|
||||
|
||||
//when
|
||||
// when
|
||||
res, err := UnmarshalChange(&objecttree.Change{DataType: invalidDataType}, data)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, c, res)
|
||||
})
|
||||
|
||||
t.Run("unmarshall plain change with encoded data type", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSmallTextUpdate()
|
||||
data, dt, err := MarshalChange(c)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, data)
|
||||
require.Empty(t, dt)
|
||||
|
||||
//when
|
||||
// when
|
||||
res, err := UnmarshalChange(&objecttree.Change{DataType: dataTypeSnappy}, data)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, c, res)
|
||||
})
|
||||
|
||||
t.Run("unmarshall bigger change with empty data type", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSetBigDetail(snappyLowerLimit)
|
||||
data, dt, err := MarshalChange(c)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, data)
|
||||
require.Equal(t, dataTypeSnappy, dt)
|
||||
|
||||
//when
|
||||
// when
|
||||
res, err := UnmarshalChange(&objecttree.Change{DataType: ""}, data)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.Error(t, err)
|
||||
assert.Nil(t, res)
|
||||
})
|
||||
|
||||
t.Run("unmarshall encoded change with invalid data type", func(t *testing.T) {
|
||||
//given
|
||||
// given
|
||||
c := changeWithSetBigDetail(snappyLowerLimit)
|
||||
data, dt, err := MarshalChange(c)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, data)
|
||||
require.Equal(t, dataTypeSnappy, dt)
|
||||
|
||||
//when
|
||||
// when
|
||||
res, err := UnmarshalChange(&objecttree.Change{DataType: invalidDataType}, data)
|
||||
|
||||
//then
|
||||
// then
|
||||
assert.Error(t, err)
|
||||
assert.Nil(t, res)
|
||||
})
|
||||
|
|
|
@ -7,6 +7,8 @@ import (
|
|||
|
||||
app "github.com/anyproto/any-sync/app"
|
||||
|
||||
domain "github.com/anyproto/anytype-heart/core/domain"
|
||||
|
||||
mock "github.com/stretchr/testify/mock"
|
||||
|
||||
smartblock "github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
|
@ -30,7 +32,7 @@ func (_m *MockService) EXPECT() *MockService_Expecter {
|
|||
}
|
||||
|
||||
// DetailsFromIdBasedSource provides a mock function with given fields: id
|
||||
func (_m *MockService) DetailsFromIdBasedSource(id string) (*types.Struct, error) {
|
||||
func (_m *MockService) DetailsFromIdBasedSource(id domain.FullID) (*types.Struct, error) {
|
||||
ret := _m.Called(id)
|
||||
|
||||
if len(ret) == 0 {
|
||||
|
@ -39,10 +41,10 @@ func (_m *MockService) DetailsFromIdBasedSource(id string) (*types.Struct, error
|
|||
|
||||
var r0 *types.Struct
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(string) (*types.Struct, error)); ok {
|
||||
if rf, ok := ret.Get(0).(func(domain.FullID) (*types.Struct, error)); ok {
|
||||
return rf(id)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(string) *types.Struct); ok {
|
||||
if rf, ok := ret.Get(0).(func(domain.FullID) *types.Struct); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
|
@ -50,7 +52,7 @@ func (_m *MockService) DetailsFromIdBasedSource(id string) (*types.Struct, error
|
|||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(string) error); ok {
|
||||
if rf, ok := ret.Get(1).(func(domain.FullID) error); ok {
|
||||
r1 = rf(id)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
|
@ -65,14 +67,14 @@ type MockService_DetailsFromIdBasedSource_Call struct {
|
|||
}
|
||||
|
||||
// DetailsFromIdBasedSource is a helper method to define mock.On call
|
||||
// - id string
|
||||
// - id domain.FullID
|
||||
func (_e *MockService_Expecter) DetailsFromIdBasedSource(id interface{}) *MockService_DetailsFromIdBasedSource_Call {
|
||||
return &MockService_DetailsFromIdBasedSource_Call{Call: _e.mock.On("DetailsFromIdBasedSource", id)}
|
||||
}
|
||||
|
||||
func (_c *MockService_DetailsFromIdBasedSource_Call) Run(run func(id string)) *MockService_DetailsFromIdBasedSource_Call {
|
||||
func (_c *MockService_DetailsFromIdBasedSource_Call) Run(run func(id domain.FullID)) *MockService_DetailsFromIdBasedSource_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(string))
|
||||
run(args[0].(domain.FullID))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
@ -82,7 +84,7 @@ func (_c *MockService_DetailsFromIdBasedSource_Call) Return(_a0 *types.Struct, _
|
|||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockService_DetailsFromIdBasedSource_Call) RunAndReturn(run func(string) (*types.Struct, error)) *MockService_DetailsFromIdBasedSource_Call {
|
||||
func (_c *MockService_DetailsFromIdBasedSource_Call) RunAndReturn(run func(domain.FullID) (*types.Struct, error)) *MockService_DetailsFromIdBasedSource_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
|
|
@ -22,10 +22,13 @@ import (
|
|||
"github.com/anyproto/anytype-heart/core/files"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/bundle"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/core/smartblock"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/database"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/addr"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
|
||||
"github.com/anyproto/anytype-heart/pkg/lib/pb/model"
|
||||
"github.com/anyproto/anytype-heart/space/spacecore/storage"
|
||||
"github.com/anyproto/anytype-heart/space/spacecore/typeprovider"
|
||||
"github.com/anyproto/anytype-heart/util/pbtypes"
|
||||
)
|
||||
|
||||
const CName = "source"
|
||||
|
@ -55,7 +58,7 @@ type Service interface {
|
|||
RegisterStaticSource(s Source) error
|
||||
NewStaticSource(params StaticSourceParams) SourceWithType
|
||||
|
||||
DetailsFromIdBasedSource(id string) (*types.Struct, error)
|
||||
DetailsFromIdBasedSource(id domain.FullID) (*types.Struct, error)
|
||||
IDsListerBySmartblockType(space Space, blockType smartblock.SmartBlockType) (IDsLister, error)
|
||||
app.Component
|
||||
}
|
||||
|
@ -113,9 +116,7 @@ func (b *BuildOptions) BuildTreeOpts() objecttreebuilder.BuildTreeOpts {
|
|||
}
|
||||
return ot, nil
|
||||
},
|
||||
TreeValidator: func(payload treestorage.TreeStorageCreatePayload, buildFunc objecttree.BuildObjectTreeFunc, aclList list.AclList) (retPayload treestorage.TreeStorageCreatePayload, err error) {
|
||||
return objecttree.ValidateFilterRawTree(payload, aclList)
|
||||
},
|
||||
TreeValidator: objecttree.ValidateFilterRawTree,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -142,7 +143,17 @@ func (s *service) newSource(ctx context.Context, space Space, id string, buildOp
|
|||
if err == nil {
|
||||
switch st {
|
||||
case smartblock.SmartBlockTypeDate:
|
||||
return NewDate(space, id), nil
|
||||
typeId, err := space.GetTypeIdByKey(context.Background(), bundle.TypeKeyDate)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find Date type to build Date object: %w", err)
|
||||
}
|
||||
return NewDate(DateSourceParams{
|
||||
Id: domain.FullID{
|
||||
ObjectID: id,
|
||||
SpaceID: space.Id(),
|
||||
},
|
||||
DateObjectTypeId: typeId,
|
||||
}), nil
|
||||
case smartblock.SmartBlockTypeBundledObjectType:
|
||||
return NewBundledObjectType(id), nil
|
||||
case smartblock.SmartBlockTypeBundledRelation:
|
||||
|
@ -203,12 +214,31 @@ func (s *service) IDsListerBySmartblockType(space Space, blockType smartblock.Sm
|
|||
}
|
||||
}
|
||||
|
||||
func (s *service) DetailsFromIdBasedSource(id string) (*types.Struct, error) {
|
||||
if !strings.HasPrefix(id, addr.DatePrefix) {
|
||||
func (s *service) DetailsFromIdBasedSource(id domain.FullID) (*types.Struct, error) {
|
||||
if !strings.HasPrefix(id.ObjectID, addr.DatePrefix) {
|
||||
return nil, fmt.Errorf("unsupported id")
|
||||
}
|
||||
// TODO Fix this, but how? It's broken by design, because no one pass spaceId here
|
||||
ss := NewDate(nil, id)
|
||||
|
||||
records, err := s.objectStore.SpaceIndex(id.SpaceID).Query(database.Query{
|
||||
Filters: []*model.BlockContentDataviewFilter{{
|
||||
Condition: model.BlockContentDataviewFilter_Equal,
|
||||
RelationKey: bundle.RelationKeyUniqueKey.String(),
|
||||
Value: pbtypes.String(bundle.TypeKeyDate.URL()),
|
||||
},
|
||||
}})
|
||||
|
||||
if len(records) != 1 && err == nil {
|
||||
err = fmt.Errorf("expected 1 record, got %d", len(records))
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query details of Date type object: %w", err)
|
||||
}
|
||||
|
||||
ss := NewDate(DateSourceParams{
|
||||
Id: id,
|
||||
DateObjectTypeId: pbtypes.GetString(records[0].Details, bundle.RelationKeyId.String()),
|
||||
})
|
||||
defer ss.Close()
|
||||
if v, ok := ss.(SourceIdEndodedDetails); ok {
|
||||
return v.DetailsFromId()
|
||||
|
|
|
@ -7,6 +7,7 @@ import (
|
|||
"fmt"
|
||||
"math/rand"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/anyproto/any-sync/accountservice"
|
||||
|
@@ -75,7 +76,16 @@ func MarshalChange(change *pb.Change) (result []byte, dataType string, err error
}

func UnmarshalChange(treeChange *objecttree.Change, data []byte) (result any, err error) {
change := &pb.Change{}
return unmarshalChange(treeChange, data, true)
}

func unmarshalChange(treeChange *objecttree.Change, data []byte, needSnapshot bool) (result any, err error) {
var change proto.Message
if needSnapshot {
change = &pb.Change{}
} else {
change = &pb.ChangeNoSnapshot{}
}
if treeChange.DataType == dataTypeSnappy {
buf := bytesPool.Get().([]byte)[:0]
defer func() {

@@ -92,10 +102,28 @@ func UnmarshalChange(treeChange *objecttree.Change, data []byte) (result any, er
}
}
}
if err = proto.Unmarshal(data, change); err == nil {
result = change
if err = proto.Unmarshal(data, change); err != nil {
return
}
if needSnapshot {
return change, nil
} else {
noSnapshotChange := change.(*pb.ChangeNoSnapshot)
return &pb.Change{
Content: noSnapshotChange.Content,
FileKeys: noSnapshotChange.FileKeys,
Timestamp: noSnapshotChange.Timestamp,
Version: noSnapshotChange.Version,
}, nil
}
}

// NewUnmarshalTreeChange creates UnmarshalChange func that unmarshalls snapshot only for the first change and ignores it for following. It saves some memory
func NewUnmarshalTreeChange() objecttree.ChangeConvertFunc {
var changeCount atomic.Int32
return func(treeChange *objecttree.Change, data []byte) (result any, err error) {
return unmarshalChange(treeChange, data, changeCount.CompareAndSwap(0, 1))
}
return
}

func UnmarshalChangeWithDataType(dataType string, decrypted []byte) (res any, err error) {

@@ -552,7 +580,7 @@ func BuildState(spaceId string, initState *state.State, ot objecttree.ReadableOb
return
}
var lastMigrationVersion uint32
err = ot.IterateFrom(startId, UnmarshalChange,
err = ot.IterateFrom(startId, NewUnmarshalTreeChange(),
func(change *objecttree.Change) bool {
count++
lastChange = change

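The memory saving relies on one idiom: changeCount.CompareAndSwap(0, 1) succeeds only on the first call, so only the first change in an iteration keeps its snapshot. A self-contained sketch of just that gate:

    // Sketch only: the first-call-only switch used inside NewUnmarshalTreeChange.
    package main

    import (
        "fmt"
        "sync/atomic"
    )

    func main() {
        var changeCount atomic.Int32
        needSnapshot := func() bool { return changeCount.CompareAndSwap(0, 1) }
        fmt.Println(needSnapshot(), needSnapshot(), needSnapshot()) // true false false
    }
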
@@ -5,6 +5,7 @@ package dot

import (
    "bytes"
    "context"
    "fmt"
    "io/ioutil"

@@ -43,6 +44,7 @@ type linkInfo struct {
}

type dot struct {
    ctx          context.Context
    graph        *cgraph.Graph
    graphviz     *graphviz.Graphviz
    knownDocs    map[string]*types.Struct

@@ -58,13 +60,18 @@ func NewMultiConverter(
    format graphviz.Format,
    sbtProvider typeprovider.SmartBlockTypeProvider,
) converter.MultiConverter {
    g := graphviz.New()
    ctx := context.Background()
    g, err := graphviz.New(ctx)
    if err != nil {
        return nil
    }
    graph, err := g.Graph()
    if err != nil {
        return nil
    }

    return &dot{
        ctx:          ctx,
        graph:        graph,
        graphviz:     g,
        exportFormat: format,

@@ -88,7 +95,7 @@ func (d *dot) ImageHashes() []string {
}

func (d *dot) Add(space smartblock.Space, st *state.State) error {
    n, e := d.graph.CreateNode(st.RootId())
    n, e := d.graph.CreateNodeByName(st.RootId())
    if e != nil {
        return e
    }

@@ -159,7 +166,7 @@ func (d *dot) Convert(sbType model.SmartBlockType) []byte {
        if !exists {
            continue
        }
        e, err = d.graph.CreateEdge("", source, target)
        e, err = d.graph.CreateEdgeByName("", source, target)
        if err != nil {
            return nil
        }

@@ -173,7 +180,7 @@ func (d *dot) Convert(sbType model.SmartBlockType) []byte {
    }

    var buf bytes.Buffer
    if err = d.graphviz.Render(d.graph, d.exportFormat, &buf); err != nil {
    if err = d.graphviz.Render(d.ctx, d.graph, d.exportFormat, &buf); err != nil {
        return nil
    }

core/date/suggest.go (new file, 155 lines)

@@ -0,0 +1,155 @@
package date

import (
    "context"
    "fmt"
    "strconv"
    "strings"
    "time"

    "github.com/anyproto/go-naturaldate/v2"
    "github.com/araddon/dateparse"

    "github.com/anyproto/anytype-heart/core/block/source"
    "github.com/anyproto/anytype-heart/core/domain"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/bundle"
    "github.com/anyproto/anytype-heart/pkg/lib/database"
    "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
    "github.com/anyproto/anytype-heart/space"
    "github.com/anyproto/anytype-heart/util/dateutil"
)

func EnrichRecordsWithDateSuggestion(
    ctx context.Context,
    records []database.Record,
    req *pb.RpcObjectSearchRequest,
    store objectstore.ObjectStore,
    spaceService space.Service,
) ([]database.Record, error) {
    dt := suggestDateForSearch(time.Now(), req.FullText)
    if dt.IsZero() {
        return records, nil
    }

    id := dateutil.TimeToDateId(dt)

    // Don't duplicate search suggestions
    var found bool
    for _, r := range records {
        if r.Details == nil || r.Details.Fields == nil {
            continue
        }
        if v, ok := r.Details.Fields[bundle.RelationKeyId.String()]; ok {
            if v.GetStringValue() == id {
                found = true
                break
            }
        }

    }
    if found {
        return records, nil
    }

    spc, err := spaceService.Get(ctx, req.SpaceId)
    if err != nil {
        return nil, fmt.Errorf("get space: %w", err)
    }

    rec, err := makeSuggestedDateRecord(spc, dt)
    if err != nil {
        return nil, fmt.Errorf("make date record: %w", err)
    }
    f, _ := database.MakeFilters(req.Filters, store.SpaceIndex(req.SpaceId)) //nolint:errcheck
    if f.FilterObject(rec.Details) {
        return append([]database.Record{rec}, records...), nil
    }
    return records, nil
}

func suggestDateForSearch(now time.Time, raw string) time.Time {
    suggesters := []func() time.Time{
        func() time.Time {
            var exprType naturaldate.ExprType
            t, exprType, err := naturaldate.Parse(raw, now)
            if err != nil {
                return time.Time{}
            }
            if exprType == naturaldate.ExprTypeInvalid {
                return time.Time{}
            }

            // naturaldate parses numbers without qualifiers (m,s) as hours in 24-hour clock format. It leads to weird behavior
            // when inputs like "123" are interpreted as "current time + 123 hours"
            if (exprType & naturaldate.ExprTypeClock24Hour) != 0 {
                t = time.Time{}
            }
            return t
        },
        func() time.Time {
            // Don't use plain numbers, because they will be represented as years
            if _, err := strconv.Atoi(strings.TrimSpace(raw)); err == nil {
                return time.Time{}
            }
            // todo: use system locale to get preferred date format
            t, err := dateparse.ParseIn(raw, now.Location(), dateparse.PreferMonthFirst(false))
            if err != nil {
                return time.Time{}
            }
            return t
        },
    }

    var t time.Time
    for _, s := range suggesters {
        if t = s(); !t.IsZero() {
            break
        }
    }
    if t.IsZero() {
        return t
    }

    // Sanitize date

    // Date without year
    if t.Year() == 0 {
        _, month, day := t.Date()
        h, m, s := t.Clock()
        t = time.Date(now.Year(), month, day, h, m, s, 0, t.Location())
    }

    return t
}

func makeSuggestedDateRecord(spc source.Space, t time.Time) (database.Record, error) {
    id := dateutil.TimeToDateId(t)

    typeId, err := spc.GetTypeIdByKey(context.Background(), bundle.TypeKeyDate)
    if err != nil {
        return database.Record{}, fmt.Errorf("failed to find Date type to build Date object: %w", err)
    }

    dateSource := source.NewDate(source.DateSourceParams{
        Id: domain.FullID{
            ObjectID: id,
            SpaceID:  spc.Id(),
        },
        DateObjectTypeId: typeId,
    })

    v, ok := dateSource.(source.SourceIdEndodedDetails)
    if !ok {
        return database.Record{}, fmt.Errorf("source does not implement DetailsFromId")
    }

    details, err := v.DetailsFromId()
    if err != nil {
        return database.Record{}, err
    }

    return database.Record{
        Details: details,
    }, nil
}

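suggestDateForSearch above tries a list of parsers in order and keeps the first non-zero result. A hedged, standard-library-only sketch of that cascade; the two closures are stand-ins for naturaldate.Parse and dateparse.ParseIn, and the sample input is made up:

package main

import (
    "fmt"
    "time"
)

// firstNonZero mirrors the suggester cascade in suggestDateForSearch:
// run each parser in order and keep the first non-zero time.
func firstNonZero(suggesters ...func() time.Time) time.Time {
    for _, s := range suggesters {
        if t := s(); !t.IsZero() {
            return t
        }
    }
    return time.Time{}
}

func main() {
    now := time.Date(2024, 11, 1, 12, 0, 0, 0, time.UTC)
    t := firstNonZero(
        // stand-in for the natural-language parser: pretend it found nothing
        func() time.Time { return time.Time{} },
        // stand-in for the fixed-format parser (day-first, like PreferMonthFirst(false))
        func() time.Time {
            parsed, err := time.ParseInLocation("02.01.2006", "05.12.2024", now.Location())
            if err != nil {
                return time.Time{}
            }
            return parsed
        },
    )
    fmt.Println(t.Format("2006-01-02")) // 2024-12-05
}
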
@@ -1,4 +1,4 @@
package core
package date

import (
    "testing"

@@ -3,10 +3,12 @@ package core
import (
    "context"

    "github.com/anyproto/anytype-heart/core/application"
    "github.com/anyproto/anytype-heart/core/block"
    "github.com/anyproto/anytype-heart/core/debug"
    "github.com/anyproto/anytype-heart/core/subscription"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/environment"
)

func (mw *Middleware) DebugTree(cctx context.Context, req *pb.RpcDebugTreeRequest) *pb.RpcDebugTreeResponse {

@@ -238,6 +240,21 @@ func (mw *Middleware) DebugAccountSelectTrace(cctx context.Context, req *pb.RpcD
    }
}

func (mw *Middleware) DebugExportLog(cctx context.Context, req *pb.RpcDebugExportLogRequest) *pb.RpcDebugExportLogResponse {
    path, err := mw.applicationService.SaveLog(environment.LOG_PATH, req.Dir)

    code := mapErrorCode(err,
        errToCode(application.ErrNoFolder, pb.RpcDebugExportLogResponseError_NO_FOLDER),
    )
    return &pb.RpcDebugExportLogResponse{
        Path: path,
        Error: &pb.RpcDebugExportLogResponseError{
            Code:        code,
            Description: getErrorDescription(err),
        },
    }
}

func (mw *Middleware) DebugAnystoreObjectChanges(cctx context.Context, req *pb.RpcDebugAnystoreObjectChangesRequest) *pb.RpcDebugAnystoreObjectChangesResponse {
    debugService := getService[debug.Debug](mw)
    changes, wrongOrder, err := debugService.DebugAnystoreObjectChanges(cctx, req.ObjectId, req.OrderBy)

@@ -255,3 +272,19 @@ func (mw *Middleware) DebugAnystoreObjectChanges(cctx context.Context, req *pb.R
        WrongOrder: wrongOrder,
    }
}

func (mw *Middleware) DebugNetCheck(cctx context.Context, req *pb.RpcDebugNetCheckRequest) *pb.RpcDebugNetCheckResponse {
    res, err := getService[debug.Debug](mw).NetCheck(cctx, req.ClientYml)
    if err != nil {
        return &pb.RpcDebugNetCheckResponse{
            Error: &pb.RpcDebugNetCheckResponseError{
                Code:        pb.RpcDebugNetCheckResponseError_UNKNOWN_ERROR,
                Description: getErrorDescription(err),
            },
        }
    }

    return &pb.RpcDebugNetCheckResponse{
        Result: res,
    }
}

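DebugExportLog above maps a sentinel error (application.ErrNoFolder) to a protobuf error code through the middleware's mapErrorCode/errToCode helpers. A hedged sketch of how such a pair can be built on errors.Is; these are not the project's actual helpers, and the names and codes below are assumptions for illustration only:

package main

import (
    "errors"
    "fmt"
)

var ErrNoFolder = errors.New("no folder") // stand-in for application.ErrNoFolder

type codeMapping struct {
    target error
    code   int
}

func errToCode(target error, code int) codeMapping { return codeMapping{target, code} }

// mapErrorCode returns 0 for nil, the first matching mapping otherwise,
// and a generic "unknown error" code (here 1) as a fallback.
func mapErrorCode(err error, mappings ...codeMapping) int {
    if err == nil {
        return 0
    }
    for _, m := range mappings {
        if errors.Is(err, m.target) {
            return m.code
        }
    }
    return 1
}

func main() {
    err := fmt.Errorf("save log: %w", ErrNoFolder)
    fmt.Println(mapErrorCode(err, errToCode(ErrNoFolder, 101))) // 101
}
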
@@ -9,6 +9,7 @@
package debug

import (
    "context"
    "os"

    "github.com/goccy/go-graphviz"

@@ -28,6 +29,10 @@ func GraphvizSvg(gv, svgFilename string) (err error) {
    }
    defer f.Close()

    g := graphviz.New()
    return g.Render(gvo, graphviz.SVG, f)
    ctx := context.Background()
    g, err := graphviz.New(ctx)
    if err != nil {
        return err
    }
    return g.Render(ctx, gvo, graphviz.SVG, f)
}

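Both the dot converter and GraphvizSvg are adapted here to the context-aware go-graphviz API: graphviz.New(ctx) now returns an error, node and edge creation use the *ByName variants, and Render takes a context. A small end-to-end sketch that only uses the calls visible in this diff; error handling is reduced to panics for brevity:

package main

import (
    "bytes"
    "context"
    "fmt"

    "github.com/goccy/go-graphviz"
)

func main() {
    ctx := context.Background()
    g, err := graphviz.New(ctx) // constructor now takes a context and returns an error
    if err != nil {
        panic(err)
    }
    graph, err := g.Graph()
    if err != nil {
        panic(err)
    }
    a, _ := graph.CreateNodeByName("a") // CreateNode -> CreateNodeByName
    b, _ := graph.CreateNodeByName("b")
    if _, err := graph.CreateEdgeByName("", a, b); err != nil { // CreateEdge -> CreateEdgeByName
        panic(err)
    }
    var buf bytes.Buffer
    if err := g.Render(ctx, graph, graphviz.SVG, &buf); err != nil { // Render now takes the context
        panic(err)
    }
    fmt.Println("rendered SVG bytes:", buf.Len())
}
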
core/debug/netcheck.go (new file, 317 lines)

@@ -0,0 +1,317 @@
package debug

import (
    "context"
    "fmt"
    "net"
    "os"
    "strings"
    "time"

    "github.com/anyproto/any-sync/app"
    "github.com/anyproto/any-sync/coordinator/coordinatorproto"
    "github.com/anyproto/any-sync/net/connutil"
    "github.com/anyproto/any-sync/net/secureservice"
    "github.com/anyproto/any-sync/net/secureservice/handshake"
    "github.com/anyproto/any-sync/net/secureservice/handshake/handshakeproto"
    "github.com/anyproto/any-sync/net/transport"
    "github.com/anyproto/any-sync/net/transport/quic"
    "github.com/anyproto/any-sync/net/transport/yamux"
    "github.com/anyproto/any-sync/nodeconf"
    "github.com/anyproto/any-sync/testutil/accounttest"
    "github.com/anyproto/go-chash"
    yamux2 "github.com/hashicorp/yamux"
    "github.com/matishsiao/goInfo"
    "gopkg.in/yaml.v3"
    "storj.io/drpc/drpcconn"
)

func (d *debug) NetCheck(ctx context.Context, clientYml string) (string, error) {
    var sb strings.Builder
    var checkAddrs []string
    if clientYml != "" {
        file, err := os.ReadFile(clientYml)
        if err != nil {
            return "", err
        }

        var configFile nodeconf.Configuration
        if err := yaml.Unmarshal(file, &configFile); err != nil {
            return "", err
        }

        for _, node := range configFile.Nodes {
            for _, t := range node.Types {
                if t == "coordinator" {
                    for _, address := range node.Addresses {
                        if !strings.HasPrefix(address, "quic://") {
                            address = "yamux://" + address
                        }
                        checkAddrs = append(checkAddrs, address)
                    }
                }
            }
        }
    } else {
        checkAddrs = strings.Split(defaultAddrs, ",")
    }
    info, err := goInfo.GetInfo()
    if err != nil {
        sb.WriteString(fmt.Sprintf("error getting system info: %s\n", err))
    } else {
        sb.WriteString(fmt.Sprintf("system info: %s\n", info.String()))
    }

    a := new(app.App)
    bootstrap(a)
    if err := a.Start(ctx); err != nil {
        panic(err)
    }

    for _, addr := range checkAddrs {
        addr = strings.TrimSpace(addr)
        switch {
        case strings.HasPrefix(addr, "yamux://"):
            res, err := probeYamux(ctx, a, addr[8:])
            if err != nil {
                sb.WriteString(fmt.Sprintf("error probing yamux %s: %s\n", addr, err))
            } else {
                sb.WriteString(res)
            }
        case strings.HasPrefix(addr, "quic://"):
            res, err := probeQuic(ctx, a, addr[7:])
            if err != nil {
                sb.WriteString(fmt.Sprintf("error probing quic %s: %s\n", addr, err))
            } else {
                sb.WriteString(res)
            }
        default:
            return "", fmt.Errorf("unexpected address scheme: %s", addr)
        }
    }
    return sb.String(), err
}

func probeYamux(ctx context.Context, a *app.App, addr string) (string, error) {
    var sb strings.Builder
    ss := a.MustComponent(secureservice.CName).(secureservice.SecureService)
    sb.WriteString(fmt.Sprintf("open TCP conn, addr: %s\n", addr))
    st := time.Now()
    conn, err := net.DialTimeout("tcp", addr, time.Second*60)
    if err != nil {
        return "", fmt.Errorf("open TCP conn error: %w, dur: %s", err, time.Since(st))
    } else {
        sb.WriteString(fmt.Sprintf("TCP conn established, ip:%s, dur: %s\n", conn.RemoteAddr().String(), time.Since(st)))
    }
    defer conn.Close()

    sb.WriteString("start handshake\n")
    hst := time.Now()
    cctx, err := ss.SecureOutbound(ctx, conn)
    if err != nil {
        return "", fmt.Errorf("handshake error: %w, dur: %s", err, time.Since(hst))
    } else {
        sb.WriteString(fmt.Sprintf("handshake success, dur: %s, total: %s\n", time.Since(hst), time.Since(st)))
    }

    yst := time.Now()
    sb.WriteString("open yamux session\n")
    sess, err := yamux2.Client(conn, yamux2.DefaultConfig())
    if err != nil {
        return "", fmt.Errorf("yamux session error: %w, dur: %s", err, time.Since(yst))
    } else {
        sb.WriteString(fmt.Sprintf("yamux session success, dur: %s, total: %s\n", time.Since(yst), time.Since(st)))
    }

    mc := yamux.NewMultiConn(cctx, connutil.NewLastUsageConn(conn), conn.RemoteAddr().String(), sess)
    sb.WriteString("open sub connection\n")
    scst := time.Now()
    sc, err := mc.Open(ctx)
    if err != nil {
        return "", fmt.Errorf("open sub connection error: %w, dur: %s", err, time.Since(scst))
    } else {
        sb.WriteString(fmt.Sprintf("open sub conn success, dur: %s, total: %s\n", time.Since(scst), time.Since(st)))
        defer sc.Close()
    }

    sb.WriteString("start proto handshake\n")
    phst := time.Now()
    if err = handshake.OutgoingProtoHandshake(ctx, sc, handshakeproto.ProtoType_DRPC); err != nil {
        return "", fmt.Errorf("proto handshake error: %w, dur: %s", err, time.Since(phst))
    } else {
        sb.WriteString(fmt.Sprintf("proto handshake success, dur: %s, total: %s\n", time.Since(phst), time.Since(st)))
    }

    sb.WriteString("start configuration request\n")
    rst := time.Now()
    resp, err := coordinatorproto.NewDRPCCoordinatorClient(drpcconn.New(sc)).NetworkConfiguration(ctx, &coordinatorproto.NetworkConfigurationRequest{})
    if err != nil {
        return "", fmt.Errorf("configuration request error: %w, dur: %s", err, time.Since(rst))
    } else {
        sb.WriteString(fmt.Sprintf("configuration request success, dur: %s, total: %s, nid: %s\n", time.Since(rst), time.Since(st), resp.GetNetworkId()))
    }
    sb.WriteString(fmt.Sprintf("success, dur: %s\n", time.Since(st)))
    return sb.String(), nil
}

func probeQuic(ctx context.Context, a *app.App, addr string) (string, error) {
    var sb strings.Builder
    qs := a.MustComponent(quic.CName).(quic.Quic)
    sb.WriteString(fmt.Sprintf("open QUIC conn, addr: %s\n", addr))
    st := time.Now()
    mc, err := qs.Dial(ctx, addr)
    if err != nil {
        return "", fmt.Errorf("open QUIC conn error: %w", err)
    } else {
        sb.WriteString(fmt.Sprintf("QUIC conn established, ip:%s, dur: %s\n", mc.Addr(), time.Since(st)))
    }
    defer mc.Close()

    sb.WriteString("open sub connection\n")
    scst := time.Now()
    sc, err := mc.Open(ctx)
    if err != nil {
        return "", fmt.Errorf("open sub connection error: %w", err)
    } else {
        sb.WriteString(fmt.Sprintf("open sub conn success, dur: %s, total: %s\n", time.Since(scst), time.Since(st)))
        defer sc.Close()
    }

    sb.WriteString("start proto handshake\n")
    phst := time.Now()
    if err = handshake.OutgoingProtoHandshake(ctx, sc, handshakeproto.ProtoType_DRPC); err != nil {
        return "", fmt.Errorf("proto handshake error: %w", err)
    } else {
        sb.WriteString(fmt.Sprintf("proto handshake success, dur: %s, total: %s\n", time.Since(phst), time.Since(st)))
    }

    sb.WriteString("start configuration request\n")
    rst := time.Now()
    resp, err := coordinatorproto.NewDRPCCoordinatorClient(drpcconn.New(sc)).NetworkConfiguration(ctx, &coordinatorproto.NetworkConfigurationRequest{})
    if err != nil {
        return "", fmt.Errorf("configuration request error: %w", err)
    } else {
        sb.WriteString(fmt.Sprintf("configuration request success, dur: %s, total: %s, nid: %s\n", time.Since(rst), time.Since(st), resp.GetNetworkId()))
    }
    sb.WriteString(fmt.Sprintf("success, dur: %s\n", time.Since(st)))
    return sb.String(), nil
}

func bootstrap(a *app.App) {
    q := quic.New()
    a.Register(&config{}).
        Register(&nodeConf{}).
        Register(q).
        Register(&accounttest.AccountTestService{}).
        Register(secureservice.New())
    q.SetAccepter(new(accepter))
}

type config struct {
}

func (c config) Name() string          { return "config" }
func (c config) Init(a *app.App) error { return nil }

func (c config) GetYamux() yamux.Config {
    return yamux.Config{
        WriteTimeoutSec:    60,
        DialTimeoutSec:     60,
        KeepAlivePeriodSec: 120,
    }
}

func (c config) GetQuic() quic.Config {
    return quic.Config{
        WriteTimeoutSec:    60,
        DialTimeoutSec:     60,
        KeepAlivePeriodSec: 120,
    }
}

type nodeConf struct {
}

func (n nodeConf) Id() string {
    return "test"
}

func (n nodeConf) Configuration() nodeconf.Configuration {
    return nodeconf.Configuration{
        Id:           "test",
        NetworkId:    "",
        Nodes:        nil,
        CreationTime: time.Time{},
    }
}

func (n nodeConf) NodeIds(spaceId string) []string {
    return nil
}

func (n nodeConf) IsResponsible(spaceId string) bool {
    return false
}

func (n nodeConf) FilePeers() []string {
    return nil
}

func (n nodeConf) ConsensusPeers() []string {
    return nil
}

func (n nodeConf) CoordinatorPeers() []string {
    return nil
}

func (n nodeConf) NamingNodePeers() []string {
    return nil
}

func (n nodeConf) PaymentProcessingNodePeers() []string {
    return nil
}

func (n nodeConf) PeerAddresses(peerId string) (addrs []string, ok bool) {
    return nil, false
}

func (n nodeConf) CHash() chash.CHash {
    return nil
}

func (n nodeConf) Partition(spaceId string) (part int) {
    return 0
}

func (n nodeConf) NodeTypes(nodeId string) []nodeconf.NodeType {
    return []nodeconf.NodeType{nodeconf.NodeTypeCoordinator}
}

func (n nodeConf) NetworkCompatibilityStatus() nodeconf.NetworkCompatibilityStatus {
    return 0
}

func (n nodeConf) Init(a *app.App) (err error) {
    return nil
}

func (n nodeConf) Name() (name string) {
    return nodeconf.CName
}

func (n nodeConf) Run(ctx context.Context) (err error) {
    return nil
}

func (n nodeConf) Close(ctx context.Context) (err error) {
    return nil
}

type accepter struct {
}

func (a accepter) Accept(mc transport.MultiConn) (err error) {
    return nil
}

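probeYamux and probeQuic above share one reporting style: append a line to a strings.Builder before each step, time the step, and either wrap the failure with the elapsed duration or log success and move on. A stripped-down, standard-library-only sketch of that style (the address below is just an example, not one of the coordinator nodes):

package main

import (
    "fmt"
    "net"
    "strings"
    "time"
)

// probeTCP mimics the report-as-you-go pattern of the probes: log, time, and
// return either the accumulated report or an error annotated with the duration.
func probeTCP(addr string) (string, error) {
    var sb strings.Builder
    sb.WriteString(fmt.Sprintf("open TCP conn, addr: %s\n", addr))
    st := time.Now()
    conn, err := net.DialTimeout("tcp", addr, 10*time.Second)
    if err != nil {
        return "", fmt.Errorf("open TCP conn error: %w, dur: %s", err, time.Since(st))
    }
    defer conn.Close()
    sb.WriteString(fmt.Sprintf("TCP conn established, ip: %s, dur: %s\n", conn.RemoteAddr(), time.Since(st)))
    return sb.String(), nil
}

func main() {
    out, err := probeTCP("example.com:443")
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Print(out)
}
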
@@ -27,6 +27,7 @@ import (
    "github.com/anyproto/anytype-heart/core/domain"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore"
    "github.com/anyproto/anytype-heart/pkg/lib/localstore/objectstore/spaceindex"
    "github.com/anyproto/anytype-heart/pkg/lib/logging"
    "github.com/anyproto/anytype-heart/space"
)

@@ -35,6 +36,16 @@ const CName = "debug"

var log = logging.Logger("anytype-debug")

const defaultAddrs = `
yamux://prod-any-sync-coordinator1.anyclub.org:443,
yamux://prod-any-sync-coordinator1.anyclub.org:1443,
yamux://prod-any-sync-coordinator1.toolpad.org:443,
yamux://prod-any-sync-coordinator1.toolpad.org:1443,
yamux://prod-any-sync-coordinator1.anytype.io:443,
yamux://prod-any-sync-coordinator1.anytype.io:1443,
quic://prod-any-sync-coordinator1.anyclub.org:5430
`

func New() Debug {
    return new(debug)
}

@@ -48,6 +59,7 @@ type Debug interface {
    TreeHeads(ctx context.Context, id string) (info TreeInfo, err error)

    DebugAnystoreObjectChanges(ctx context.Context, chatObjectId string, orderBy pb.RpcDebugAnystoreObjectChangesRequestOrderBy) ([]*pb.RpcDebugAnystoreObjectChangesResponseChange, bool, error)
    NetCheck(ctx context.Context, clientYml string) (string, error)
}

type debug struct {

@@ -237,13 +249,6 @@ func (d *debug) DumpTree(ctx context.Context, objectID string, path string, anon
}

func (d *debug) DumpLocalstore(ctx context.Context, spaceID string, objIds []string, path string) (filename string, err error) {
    if len(objIds) == 0 {
        objIds, err = d.store.ListIdsCrossSpace()
        if err != nil {
            return "", err
        }
    }

    filename = filepath.Join(path, fmt.Sprintf("at.store.dbg.%s.zip", time.Now().Format("20060102.150405.99")))
    f, err := os.Create(filename)
    if err != nil {

@@ -257,31 +262,38 @@ func (d *debug) DumpLocalstore(ctx context.Context, spaceID string, objIds []str
    var wr io.Writer
    m := jsonpb.Marshaler{Indent: " "}

    store := d.store.SpaceIndex(spaceID)

    for _, objId := range objIds {
        doc, err := store.GetWithLinksInfoById(objId)
    err = d.store.IterateSpaceIndex(func(store spaceindex.Store) error {
        objIds, err = store.ListIds()
        if err != nil {
            var err2 error
            wr, err2 = zw.Create(fmt.Sprintf("%s.txt", objId))
            if err2 != nil {
                return "", err
            return err
        }

        for _, objId := range objIds {
            doc, err := store.GetWithLinksInfoById(objId)
            if err != nil {
                var err2 error
                wr, err2 = zw.Create(fmt.Sprintf("%s/%s.txt", store.SpaceId(), objId))
                if err2 != nil {
                    return err
                }

                _, _ = wr.Write([]byte(err.Error()))
                continue
            }
            wr, err = zw.Create(fmt.Sprintf("%s/%s.json", store.SpaceId(), objId))
            if err != nil {
                return err
            }

            wr.Write([]byte(err.Error()))
            continue
        }
        wr, err = zw.Create(fmt.Sprintf("%s.json", objId))
        if err != nil {
            return "", err
            err = m.Marshal(wr, doc)
            if err != nil {
                return err
            }
        }
        return nil
    })

        err = m.Marshal(wr, doc)
        if err != nil {
            return "", err
        }
    }
    return filename, nil
    return filename, err
}

func (d *debug) DebugAnystoreObjectChanges(ctx context.Context, chatObjectId string, orderBy pb.RpcDebugAnystoreObjectChangesRequestOrderBy) ([]*pb.RpcDebugAnystoreObjectChangesResponseChange, bool, error) {

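DumpLocalstore now iterates every space index and writes each object into the archive under a <spaceId>/<objectId>.json entry (or .txt when fetching the object fails). A minimal sketch of that zip layout using only the standard library; the space and object ids are made up, and the JSON payload stands in for the jsonpb-marshalled object:

package main

import (
    "archive/zip"
    "bytes"
    "fmt"
)

func main() {
    var buf bytes.Buffer
    zw := zip.NewWriter(&buf)

    objects := map[string][]string{ // spaceId -> object ids (hypothetical)
        "space1": {"obj-a", "obj-b"},
    }
    for spaceId, ids := range objects {
        for _, objId := range ids {
            // One archive entry per object, namespaced by its space id.
            wr, err := zw.Create(fmt.Sprintf("%s/%s.json", spaceId, objId))
            if err != nil {
                panic(err)
            }
            if _, err := wr.Write([]byte(`{"id":"` + objId + `"}`)); err != nil {
                panic(err)
            }
        }
    }
    if err := zw.Close(); err != nil {
        panic(err)
    }
    fmt.Println("archive bytes:", buf.Len())
}
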
@@ -115,7 +115,7 @@ func (mw *Middleware) ObjectSetIsFavorite(_ context.Context, req *pb.RpcObjectSe
        }
        return m
    }
    err := getService[detailservice.Service](mw).SetIsFavorite(req.ContextId, req.IsFavorite)
    err := getService[detailservice.Service](mw).SetIsFavorite(req.ContextId, req.IsFavorite, true)
    if err != nil {
        return response(pb.RpcObjectSetIsFavoriteResponseError_UNKNOWN_ERROR, err)
    }

Some files were not shown because too many files have changed in this diff.