Mirror of https://github.com/anyproto/anytype-heart.git (synced 2025-06-07 21:37:04 +09:00)

GO-1822: added more tests and fixed existing

Signed-off-by: AnastasiaShemyakinskaya <shem98a@mail.ru>

Parent: 9483ebae80
Commit: ed1f3a9a4a

20 changed files with 969 additions and 674 deletions
@@ -23,14 +23,12 @@ var (
    ErrFileImportNoObjectsInZipArchive = fmt.Errorf("no objects in zip archive")
    ErrFileImportNoObjectsInDirectory = fmt.Errorf("no objects in directory")
    ErrFileImportSourceFileOpenError = fmt.Errorf("failed to open imported file")
    ErrPbNotAnyBlockFormat = fmt.Errorf("file doesn't match Anyblock format ")
    ErrWrongHTMLFormat = fmt.Errorf("html file has wrong structure")
    ErrNoSnapshotToImport = fmt.Errorf("no snapshot to import") // for external import
    ErrCSVFileFormat = fmt.Errorf("csv file has wrong structure")
)

type ConvertError struct {

@@ -145,8 +143,6 @@ func GetImportNotificationErrorCode(err error) model.ImportErrorCode {
        return model.Import_FILE_LOAD_ERROR
    case errors.Is(err, ErrWrongHTMLFormat):
        return model.Import_HTML_WRONG_HTML_STRUCTURE
    case errors.Is(err, ErrCSVFileFormat):
        return model.Import_CSV_WRONG_CSV_STRUCTURE
    case errors.Is(err, list.ErrInsufficientPermissions):
        return model.Import_INSUFFICIENT_PERMISSIONS
    default:

@@ -173,7 +169,7 @@ func IsNoObjectError(err error) bool {
func isDefinedError(err error) bool {
    return errors.Is(err, ErrCancel) || errors.Is(err, ErrCsvLimitExceeded) || errors.Is(err, ErrNotionServerExceedRateLimit) ||
        errors.Is(err, ErrNotionServerIsUnavailable) || errors.Is(err, ErrFileLoad) || errors.Is(err, ErrPbNotAnyBlockFormat) ||
        errors.Is(err, ErrWrongHTMLFormat) || errors.Is(err, ErrFileImportSourceFileOpenError) || errors.Is(err, ErrCSVFileFormat)
        errors.Is(err, ErrWrongHTMLFormat)
}

func GetGalleryResponseCode(err error) pb.RpcObjectImportExperienceResponseErrorCode {
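Note on the error-handling pattern these hunks rely on: wrapping a sentinel with fmt.Errorf("%w: ...") keeps it detectable through errors.Is, which is what GetImportNotificationErrorCode and isDefinedError depend on. A minimal, self-contained sketch of the idea (sentinel and function names here are illustrative, not the package's own):

package main

import (
    "errors"
    "fmt"
)

// errNoObjectsInZip stands in for a sentinel such as ErrFileImportNoObjectsInZipArchive.
var errNoObjectsInZip = errors.New("no objects in zip archive")

// openArchive wraps the sentinel while adding context about the failing path.
func openArchive(path string) error {
    return fmt.Errorf("%w: %s", errNoObjectsInZip, path)
}

func main() {
    err := openArchive("empty.zip")
    // errors.Is unwraps the %w chain, so the sentinel is still recognized.
    fmt.Println(errors.Is(err, errNoObjectsInZip)) // true
}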
@@ -2,7 +2,6 @@ package common

import (
    "bufio"
    "fmt"
    "io"
    "os"
    "path/filepath"

@@ -37,7 +36,7 @@ func ProvideFileName(fileName string, filesSource source.Source, path string, te
        fileName = tempFile
        return nil
    }); err != nil {
        return "", false, fmt.Errorf("%w: %s", ErrFileImportSourceFileOpenError, err.Error())
        return "", false, err
    }
    return fileName, createFileBlock, nil
}
core/block/import/common/test/utils.go — new file (25 lines)

@@ -0,0 +1,25 @@
package test

import (
    "archive/zip"
    "fmt"
    "os"
    "testing"

    "github.com/stretchr/testify/assert"
)

func CreateEmptyZip(t *testing.T, zipFileName string) error {
    zipFile, err := os.Create(zipFileName)
    if err != nil {
        return fmt.Errorf("Failed to create zip file: %w\n", err)
    }
    defer zipFile.Close()

    zipWriter := zip.NewWriter(zipFile)
    defer func() {
        err = zipWriter.Close()
        assert.NoError(t, err)
    }()
    return nil
}
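A hedged usage sketch for the new helper, mirroring how the tests added later in this commit call it (the test and package names below are illustrative, not taken from the repository):

package importer_test // illustrative package name

import (
    "path/filepath"
    "testing"

    "github.com/stretchr/testify/assert"

    "github.com/anyproto/anytype-heart/core/block/import/common/test"
)

func TestImportEmptyArchive(t *testing.T) {
    // Write a zip archive with no entries into a temporary directory.
    dir := t.TempDir()
    zipPath := filepath.Join(dir, "empty.zip")
    err := test.CreateEmptyZip(t, zipPath)
    assert.Nil(t, err)

    // zipPath can now be handed to an importer; the tests in this commit
    // expect ErrFileImportNoObjectsInZipArchive for such an archive.
}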
@@ -145,7 +145,7 @@ func (c *CSV) getSnapshotsAndObjectsIds(importSource source.Source,
    }
    csvTable, err := c.getCSVTable(fileReader, params.GetDelimiter())
    if err != nil {
        allErrors.Add(fmt.Errorf("%w, %s", common.ErrCSVFileFormat, err.Error()))
        allErrors.Add(err)
        return !allErrors.ShouldAbortImport(len(params.GetPath()), model.Import_Csv)
    }
    if params.TransposeRowsAndColumns && len(csvTable) != 0 {

@@ -160,7 +160,7 @@ func (c *CSV) getSnapshotsAndObjectsIds(importSource source.Source,
        allSnapshots = append(allSnapshots, snapshots...)
        return true
    }); iterateErr != nil {
        allErrors.Add(fmt.Errorf("%w: %s", common.ErrFileImportSourceFileOpenError, iterateErr.Error()))
        allErrors.Add(iterateErr)
    }
    return &Result{allObjectsIds, allSnapshots}
}
@@ -3,7 +3,6 @@ package csv
import (
    "context"
    "errors"
    "os"
    "path/filepath"
    "strings"
    "testing"

@@ -11,11 +10,10 @@ import (
    "github.com/gogo/protobuf/types"
    "github.com/samber/lo"
    "github.com/stretchr/testify/assert"
    "golang.org/x/mod/module"
    "golang.org/x/mod/zip"

    "github.com/anyproto/anytype-heart/core/block/editor/template"
    "github.com/anyproto/anytype-heart/core/block/import/common"
    "github.com/anyproto/anytype-heart/core/block/import/common/test"
    "github.com/anyproto/anytype-heart/core/block/process"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/bundle"

@@ -29,7 +27,7 @@ func TestCsv_GetSnapshotsEmptyFile(t *testing.T) {
    p := process.NewProgress(pb.ModelProcess_Import)
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{Path: []string{"testdata/test.csv"}},
            CsvParams: &pb.RpcObjectImportRequestCsvParams{Path: []string{filepath.Join("testdata", "test.csv")}},
        },
        Type: model.Import_Csv,
        Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,

@@ -55,7 +53,7 @@ func TestCsv_GetSnapshots(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/Journal.csv"},
                Path: []string{filepath.Join("testdata", "Journal.csv")},
                UseFirstRowForRelations: true},
        },
        Type: model.Import_Csv,

@@ -88,7 +86,7 @@ func TestCsv_GetSnapshotsTable(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/Journal.csv"},
                Path: []string{filepath.Join("testdata", "Journal.csv")},
                Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
            },
        },

@@ -117,7 +115,7 @@ func TestCsv_GetSnapshotsTableUseFirstColumnForRelationsOn(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/Journal.csv"},
                Path: []string{filepath.Join("testdata", "Journal.csv")},
                Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
                UseFirstRowForRelations: true,
            },

@@ -157,7 +155,11 @@ func TestCsv_GetSnapshotsSemiColon(t *testing.T) {
    p := process.NewProgress(pb.ModelProcess_Import)
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{Path: []string{"testdata/semicolon.csv"}, Delimiter: ";", UseFirstRowForRelations: true},
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{filepath.Join("testdata", "semicolon.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: true,
            },
        },
        Type: model.Import_Csv,
        Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,

@@ -177,7 +179,7 @@ func TestCsv_GetSnapshotsTranspose(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/transpose.csv"},
                Path: []string{filepath.Join("testdata", "transpose.csv")},
                Delimiter: ";",
                TransposeRowsAndColumns: true,
                UseFirstRowForRelations: true,

@@ -217,7 +219,7 @@ func TestCsv_GetSnapshotsTransposeUseFirstRowForRelationsOff(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/transpose.csv"},
                Path: []string{filepath.Join("testdata", "transpose.csv")},
                Delimiter: ";",
                TransposeRowsAndColumns: true,
                UseFirstRowForRelations: false,

@@ -245,7 +247,7 @@ func TestCsv_GetSnapshotsUseFirstColumnForRelationsOn(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/Journal.csv"},
                Path: []string{filepath.Join("testdata", "Journal.csv")},
                Delimiter: ",",
                UseFirstRowForRelations: true,
            },

@@ -292,7 +294,7 @@ func TestCsv_GetSnapshotsUseFirstColumnForRelationsOff(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/Journal.csv"},
                Path: []string{filepath.Join("testdata", "Journal.csv")},
                Delimiter: ",",
            },
        },

@@ -347,7 +349,7 @@ func TestCsv_GetSnapshotsQuotedStrings(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/quotedstrings.csv"},
                Path: []string{filepath.Join("testdata", "quotedstrings.csv")},
                Delimiter: ",",
                TransposeRowsAndColumns: true,
                UseFirstRowForRelations: true,

@@ -368,7 +370,7 @@ func TestCsv_GetSnapshotsBigFile(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/bigfile.csv", "testdata/transpose.csv"},
                Path: []string{filepath.Join("testdata", "bigfile.csv"), filepath.Join("testdata", "transpose.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: true,
            },

@@ -389,7 +391,7 @@ func TestCsv_GetSnapshotsEmptyFirstLineUseFirstColumnForRelationsOn(t *testing.T
    sn, err := csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/emptyfirstline.csv"},
                Path: []string{filepath.Join("testdata", "emptyfirstline.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: true,
            },

@@ -417,7 +419,7 @@ func TestCsv_GetSnapshotsEmptyFirstLineUseFirstColumnForRelationsOff(t *testing.
    sn, err := csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/emptyfirstline.csv"},
                Path: []string{filepath.Join("testdata", "emptyfirstline.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: false,
            },

@@ -464,7 +466,7 @@ func TestCsv_GetSnapshots1000RowsFile(t *testing.T) {
    sn, _ := csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/1000_rows.csv"},
                Path: []string{filepath.Join("testdata", "1000_rows.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: false,
            },

@@ -490,7 +492,7 @@ func TestCsv_GetSnapshots1000RowsFile(t *testing.T) {
    sn, _ = csv.GetSnapshots(ctx, &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/1000_rows.csv"},
                Path: []string{filepath.Join("testdata", "1000_rows.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: true,
            },

@@ -569,7 +571,7 @@ func Test_findUniqueRelationWithSpaces(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/relationswithspaces.csv"},
                Path: []string{filepath.Join("testdata", "relationswithspaces.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: true,
            },

@@ -612,7 +614,7 @@ func TestCsv_GetSnapshots10Relations(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/10_relations.csv"},
                Path: []string{filepath.Join("testdata", "10_relations.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: false,
            },

@@ -643,7 +645,7 @@ func TestCsv_GetSnapshots10Relations(t *testing.T) {
    sn, err = csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/10_relations.csv"},
                Path: []string{filepath.Join("testdata", "10_relations.csv")},
                Delimiter: ";",
                UseFirstRowForRelations: true,
            },

@@ -681,7 +683,7 @@ func TestCsv_GetSnapshotsTableModeDifferentColumnsNumber(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/differentcolumnnumber.csv"},
                Path: []string{filepath.Join("testdata", "differentcolumnnumber.csv")},
                Delimiter: ",",
                UseFirstRowForRelations: true,
                Mode: pb.RpcObjectImportRequestCsvParams_TABLE,

@@ -719,7 +721,7 @@ func TestCsv_GetSnapshotsTableModeDifferentColumnsNumber(t *testing.T) {
    sn, err := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
            CsvParams: &pb.RpcObjectImportRequestCsvParams{
                Path: []string{"testdata/differentcolumnnumber.csv"},
                Path: []string{filepath.Join("testdata", "differentcolumnnumber.csv")},
                Delimiter: ",",
                UseFirstRowForRelations: true,
                Mode: pb.RpcObjectImportRequestCsvParams_COLLECTION,

@@ -782,14 +784,9 @@ func TestCSV_GetSnapshots(t *testing.T) {
    t.Run("no object in archive", func(t *testing.T) {
        // given
        dir := t.TempDir()
        file := "archive.zip"
        filePath := filepath.Join(dir, file)
        tmpZip, err := os.Create(filePath)
        f, err := os.CreateTemp(dir, filepath.Join("test", "test"))
        zipPath := filepath.Join(dir, "empty.zip")
        err := test.CreateEmptyZip(t, zipPath)
        assert.Nil(t, err)
        err = zip.Create(tmpZip, module.Version{Path: dir}, []zip.File{*f})
        assert.Nil(t, err)

        csv := CSV{}
        p := process.NewProgress(pb.ModelProcess_Import)

@@ -797,7 +794,7 @@ func TestCSV_GetSnapshots(t *testing.T) {
        _, ce := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
                CsvParams: &pb.RpcObjectImportRequestCsvParams{
                    Path: []string{filePath},
                    Path: []string{zipPath},
                    Delimiter: ",",
                    UseFirstRowForRelations: true,
                    Mode: pb.RpcObjectImportRequestCsvParams_TABLE,

@@ -809,7 +806,31 @@ func TestCSV_GetSnapshots(t *testing.T) {
        // then
        assert.NotNil(t, ce)
        assert.NotNil(t, errors.Is(ce.GetResultError(model.Import_Csv), common.ErrFileImportNoObjectsInZipArchive))
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Csv), common.ErrFileImportNoObjectsInZipArchive))
    })
    t.Run("no object in dir", func(t *testing.T) {
        // given
        dir := t.TempDir()
        csv := CSV{}
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := csv.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfCsvParams{
                CsvParams: &pb.RpcObjectImportRequestCsvParams{
                    Path: []string{dir},
                    Delimiter: ",",
                    UseFirstRowForRelations: true,
                    Mode: pb.RpcObjectImportRequestCsvParams_TABLE,
                },
            },
            Type: model.Import_Csv,
            Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
        }, p)

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Csv), common.ErrFileImportNoObjectsInDirectory))
    })
}

func getRelationsNumber(keys []string) int {
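Most of the test changes in this file swap hard-coded "testdata/…" strings for filepath.Join so the paths resolve with the platform's separator. A minimal illustration:

package main

import (
    "fmt"
    "path/filepath"
)

func main() {
    // Prints "testdata/Journal.csv" on Linux/macOS and "testdata\Journal.csv" on Windows.
    fmt.Println(filepath.Join("testdata", "Journal.csv"))
}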
@@ -149,7 +149,7 @@ func (h *HTML) getSnapshotsAndRootObjects(path string,
        rootObjects = append(rootObjects, id)
        return true
    }); iterateErr != nil {
        allErrors.Add(fmt.Errorf("%w: %s", common.ErrFileImportSourceFileOpenError, iterateErr.Error()))
        allErrors.Add(iterateErr)
    }
    return snapshots, rootObjects
}
@@ -14,6 +14,7 @@ import (
    "github.com/anyproto/anytype-heart/core/block/import/common"
    "github.com/anyproto/anytype-heart/core/block/import/common/source"
    "github.com/anyproto/anytype-heart/core/block/import/common/test"
    "github.com/anyproto/anytype-heart/core/block/process"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/bundle"

@@ -28,28 +29,79 @@ func (p *MockTempDirProvider) TempDir() string {
}

func TestHTML_GetSnapshots(t *testing.T) {
    h := &HTML{}
    p := process.NewProgress(pb.ModelProcess_Import)
    sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
            HtmlParams: &pb.RpcObjectImportRequestHtmlParams{Path: []string{"testdata/test.html", "testdata/test"}},
        },
        Type: model.Import_Html,
        Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
    }, p)
    t.Run("success", func(t *testing.T) {
        h := &HTML{}
        p := process.NewProgress(pb.ModelProcess_Import)
        sn, err := h.GetSnapshots(
            context.Background(),
            &pb.RpcObjectImportRequest{
                Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
                    HtmlParams: &pb.RpcObjectImportRequestHtmlParams{Path: []string{filepath.Join("testdata", "test.html"), filepath.Join("testdata", "test")}},
                },
                Type: model.Import_Html,
                Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
            },
            p,
        )

    assert.NotNil(t, sn)
    assert.Len(t, sn.Snapshots, 2)
    assert.Contains(t, sn.Snapshots[0].FileName, "test.html")
    assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
    assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))
        assert.NotNil(t, sn)
        assert.Len(t, sn.Snapshots, 2)
        assert.Contains(t, sn.Snapshots[0].FileName, "test.html")
        assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
        assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))

    assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
    assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
    assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())
        assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
        assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
        assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())

    assert.NotEmpty(t, err)
    assert.True(t, errors.Is(err.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInDirectory))
        assert.NotEmpty(t, err)
        assert.True(t, errors.Is(err.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInDirectory))
    })
    t.Run("no object in archive", func(t *testing.T) {
        // given
        dir := t.TempDir()
        zipPath := filepath.Join(dir, "empty.zip")
        err := test.CreateEmptyZip(t, zipPath)
        assert.Nil(t, err)
        html := HTML{}
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := html.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
                HtmlParams: &pb.RpcObjectImportRequestHtmlParams{
                    Path: []string{zipPath},
                },
            },
            Type: model.Import_Html,
            Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
        }, p)

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInZipArchive))
    })
    t.Run("no object in dir", func(t *testing.T) {
        // given
        dir := t.TempDir()
        html := HTML{}
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := html.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfHtmlParams{
                HtmlParams: &pb.RpcObjectImportRequestHtmlParams{
                    Path: []string{dir},
                },
            },
            Type: model.Import_Html,
            Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
        }, p)

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Html), common.ErrFileImportNoObjectsInDirectory))
    })
}

func TestHTML_provideFileName(t *testing.T) {

@@ -73,9 +125,10 @@ func TestHTML_provideFileName(t *testing.T) {
        currentDir, err := os.Getwd()
        assert.Nil(t, err)
        source := source.GetSource(currentDir)
        filePath := filepath.Join("testdata", "test")

        // when
        absPath, err := filepath.Abs("testdata/test")
        absPath, err := filepath.Abs(filePath)
        assert.Nil(t, err)
        newFileName, _, err := common.ProvideFileName(absPath, source, currentDir, h.tempDirProvider)

@@ -89,13 +142,14 @@ func TestHTML_provideFileName(t *testing.T) {
        currentDir, err := os.Getwd()
        assert.Nil(t, err)
        source := source.GetSource(currentDir)
        filePath := filepath.Join("testdata", "test")

        // when
        newFileName, _, err := common.ProvideFileName("testdata/test", source, currentDir, h.tempDirProvider)
        newFileName, _, err := common.ProvideFileName(filePath, source, currentDir, h.tempDirProvider)

        // then
        assert.Nil(t, err)
        absPath, err := filepath.Abs("testdata/test")
        absPath, err := filepath.Abs(filePath)
        assert.Nil(t, err)
        assert.Equal(t, absPath, newFileName)
    })
@@ -1,7 +1,6 @@
package markdown

import (
    "fmt"
    "io"
    "os"
    "path/filepath"

@@ -76,7 +75,7 @@ func (m *mdConverter) getFileInfo(importSource source.Source, allErrors *common.
        }
        return true
    }); iterateErr != nil {
        allErrors.Add(fmt.Errorf("%w: %s", common.ErrFileImportSourceFileOpenError, iterateErr.Error()))
        allErrors.Add(iterateErr)
    }
    return fileInfo
}
@@ -24,15 +24,18 @@ func Test_processFiles(t *testing.T) {
    t.Run("imported directory include mov and pdf files - md file has file blocks", func(t *testing.T) {
        // given
        converter := newMDConverter(&MockTempDir{})
        _, err := os.Create("./testdata/test.pdf")
        pdfFile := filepath.Join("testdata", "test.pdf")
        _, err := os.Create(pdfFile)
        assert.Nil(t, err)
        defer os.Remove("./testdata/test.pdf")
        _, err = os.Create("./testdata/test.mov")
        defer os.Remove(pdfFile)

        movFile := filepath.Join("testdata", "test.mov")
        _, err = os.Create(movFile)
        assert.Nil(t, err)
        defer os.Remove("./testdata/test.mov")
        defer os.Remove(movFile)

        workingDir, err := os.Getwd()
        absolutePath := filepath.Join(workingDir, "./testdata")
        absolutePath := filepath.Join(workingDir, "testdata")
        source := source.GetSource(absolutePath)

        // when

@@ -62,10 +65,10 @@ func Test_processFiles(t *testing.T) {
    t.Run("imported directory include without mov and pdf files - no file blocks", func(t *testing.T) {
        // given
        converter := newMDConverter(&MockTempDir{})
        source := source.GetSource("./testdata")
        source := source.GetSource("testdata")
        workingDir, err := os.Getwd()
        assert.Nil(t, err)
        absolutePath := filepath.Join(workingDir, "./testdata")
        absolutePath := filepath.Join(workingDir, "testdata")

        // when
        files := converter.processFiles(absolutePath, common.NewError(pb.RpcObjectImportRequest_IGNORE_ERRORS), source)
@@ -2,12 +2,15 @@ package markdown

import (
    "context"
    "errors"
    "os"
    "path/filepath"
    "testing"

    "github.com/stretchr/testify/assert"

    "github.com/anyproto/anytype-heart/core/block/import/common"
    "github.com/anyproto/anytype-heart/core/block/import/common/test"
    "github.com/anyproto/anytype-heart/core/block/process"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/pb/model"

@@ -39,7 +42,7 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
        subPageId string
    )
    for _, snapshot := range sn.Snapshots {
        if snapshot.FileName == filepath.Join(testDirectory, "test_database/test.md") {
        if snapshot.FileName == filepath.Join(testDirectory, "test_database", "test.md") {
            subPageId = snapshot.Id
            break
        }

@@ -55,7 +58,6 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
        }
        assert.True(t, found)
    })

    t.Run("no object error", func(t *testing.T) {
        // given
        testDirectory := t.TempDir()

@@ -113,6 +115,30 @@ func TestMarkdown_GetSnapshots(t *testing.T) {
        }
        assert.True(t, found)
    })
    t.Run("no object in archive", func(t *testing.T) {
        // given
        testDirectory := t.TempDir()
        zipPath := filepath.Join(testDirectory, "empty.zip")
        err := test.CreateEmptyZip(t, zipPath)
        assert.Nil(t, err)

        h := &Markdown{}
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        sn, ce := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfMarkdownParams{
                MarkdownParams: &pb.RpcObjectImportRequestMarkdownParams{Path: []string{zipPath}},
            },
            Type: model.Import_Markdown,
            Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
        }, p)

        // then
        assert.NotNil(t, ce)
        assert.Nil(t, sn)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Markdown), common.ErrFileImportNoObjectsInZipArchive))
    })
}

func buildExpectedTree(fileNameToObjectId map[string]string, provider *MockTempDir, rootId string) *blockbuilder.Block {
@@ -22,7 +22,7 @@ func TransformHTTPCodeToError(response []byte) error {
    if notionErr.Status >= 500 {
        return fmt.Errorf("%w: %s", common.ErrNotionServerIsUnavailable, notionErr.Message)
    }
    if notionErr.Status >= 429 {
    if notionErr.Status == 429 {
        return fmt.Errorf("%w: %s", common.ErrNotionServerExceedRateLimit, notionErr.Message)
    }
    return fmt.Errorf("status: %d, code: %s, message: %s", notionErr.Status, notionErr.Code, notionErr.Message)
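This hunk touches the rate-limit branch of the status check: with == 429 only the rate-limit status maps to the wrapped rate-limit error (5xx is handled just above it), and every other client error falls through to the generic formatted error. A hedged, self-contained sketch of that mapping, using a plain status/code/message triple instead of the package's own response type:

package main

import (
    "errors"
    "fmt"
)

// Stand-ins for common.ErrNotionServerIsUnavailable and common.ErrNotionServerExceedRateLimit.
var (
    errServerUnavailable = errors.New("notion server is unavailable")
    errRateLimitExceeded = errors.New("rate limit exceeded")
)

func classify(status int, code, message string) error {
    if status >= 500 {
        return fmt.Errorf("%w: %s", errServerUnavailable, message)
    }
    if status == 429 { // only the rate-limit status, not every 4xx
        return fmt.Errorf("%w: %s", errRateLimitExceeded, message)
    }
    return fmt.Errorf("status: %d, code: %s, message: %s", status, code, message)
}

func main() {
    fmt.Println(errors.Is(classify(429, "rate_limit_error", "slow down"), errRateLimitExceeded)) // true
    fmt.Println(errors.Is(classify(404, "object_not_found", "missing"), errRateLimitExceeded))   // false
}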
@@ -1,15 +1,112 @@
package notion

import (
    "context"
    "errors"
    "net/http"
    "net/http/httptest"
    "testing"

    "github.com/stretchr/testify/assert"

    "github.com/anyproto/anytype-heart/core/block/import/common"
    "github.com/anyproto/anytype-heart/core/block/import/notion/api/client"
    "github.com/anyproto/anytype-heart/core/block/import/notion/api/database"
    "github.com/anyproto/anytype-heart/core/block/import/notion/api/page"
    "github.com/anyproto/anytype-heart/core/block/import/notion/api/property"
    "github.com/anyproto/anytype-heart/core/block/import/notion/api/search"
    "github.com/anyproto/anytype-heart/core/block/process"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/pb/model"
)

func TestNotion_GetSnapshots(t *testing.T) {
    t.Run("internal error from Notion", func(t *testing.T) {
        // given
        converter := &Notion{}
        s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            w.WriteHeader(http.StatusInternalServerError)
            w.Write([]byte(`{"object":"error","status":500,"code":"internal_error","message":"internal server error"}`))
        }))
        defer s.Close()
        c := client.NewClient()
        c.BasePath = s.URL
        converter.search = search.New(c)
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := converter.GetSnapshots(
            context.Background(),
            &pb.RpcObjectImportRequest{
                Params: &pb.RpcObjectImportRequestParamsOfNotionParams{NotionParams: &pb.RpcObjectImportRequestNotionParams{ApiKey: "key"}},
                Type: model.Import_Markdown,
                Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
            },
            p,
        )

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Notion), common.ErrNotionServerIsUnavailable))
    })
    t.Run("rate limit error from Notion", func(t *testing.T) {
        // given
        converter := &Notion{}
        s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            w.WriteHeader(http.StatusTooManyRequests)
            w.Write([]byte(`{"object":"error","status":429,"code":"rate_limit_error","message":"rate limit error"}`))
        }))
        defer s.Close()
        c := client.NewClient()
        c.BasePath = s.URL
        converter.search = search.New(c)
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := converter.GetSnapshots(
            context.Background(),
            &pb.RpcObjectImportRequest{
                Params: &pb.RpcObjectImportRequestParamsOfNotionParams{NotionParams: &pb.RpcObjectImportRequestNotionParams{ApiKey: "key"}},
                Type: model.Import_Markdown,
                Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
            },
            p,
        )

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Notion), common.ErrNotionServerExceedRateLimit))
    })
    t.Run("no objects in integration", func(t *testing.T) {
        // given
        converter := &Notion{}
        s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            w.WriteHeader(http.StatusOK)
            w.Write([]byte(`{"object":"list","results":[]}`))
        }))
        defer s.Close()
        c := client.NewClient()
        c.BasePath = s.URL
        converter.search = search.New(c)
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := converter.GetSnapshots(
            context.Background(),
            &pb.RpcObjectImportRequest{
                Params: &pb.RpcObjectImportRequestParamsOfNotionParams{NotionParams: &pb.RpcObjectImportRequestNotionParams{ApiKey: "key"}},
                Type: model.Import_Markdown,
                Mode: pb.RpcObjectImportRequest_IGNORE_ERRORS,
            },
            p,
        )

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Notion), common.ErrNoObjectInIntegration))
    })
}

func TestNotion_getUniqueProperties(t *testing.T) {
    t.Run("Page and Database have the same property - 1 unique item", func(t *testing.T) {
        // given
@@ -39,7 +39,6 @@ const (
)

var ErrNotAnyBlockExtension = errors.New("not JSON or PB extension")
var ErrWrongFormat = errors.New("wrong PB or JSON format")

type Pb struct {
    service *collection.Service

@@ -190,7 +189,7 @@ func (p *Pb) getProfileFromFiles(importSource source.Source) (*pb.Profile, error
        return true
    })
    if iterateError != nil {
        return nil, fmt.Errorf("%w: %s", common.ErrFileImportSourceFileOpenError, iterateError.Error())
        return nil, iterateError
    }
    return profile, err
}

@@ -249,7 +248,7 @@ func (p *Pb) getSnapshotsFromProvidedFiles(
        }
        return true
    }); iterateErr != nil {
        allErrors.Add(fmt.Errorf("%w: %s", common.ErrFileImportSourceFileOpenError, iterateErr.Error()))
        allErrors.Add(iterateErr)
    }
    return allSnapshots, widgetSnapshot, workspaceSnapshot
}
@@ -17,6 +17,7 @@ import (
    "github.com/stretchr/testify/assert"

    "github.com/anyproto/anytype-heart/core/block/import/common"
    "github.com/anyproto/anytype-heart/core/block/import/common/test"
    "github.com/anyproto/anytype-heart/core/block/process"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/bundle"

@@ -29,7 +30,7 @@ func Test_GetSnapshotsSuccess(t *testing.T) {
    defer os.RemoveAll(path)
    wr, err := newZipWriter(path)
    assert.NoError(t, err)
    f, err := os.Open("testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb")
    f, err := os.Open(filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"))
    reader := bufio.NewReader(f)

    assert.NoError(t, err)

@@ -77,7 +78,7 @@ func Test_GetSnapshotsFailedToGetSnapshot(t *testing.T) {
    defer os.RemoveAll(path)
    wr, err := newZipWriter(path)
    assert.NoError(t, err)
    f, err := os.Open("testdata/test.pb")
    f, err := os.Open(filepath.Join("testdata", "test.pb"))
    reader := bufio.NewReader(f)

    assert.NoError(t, err)

@@ -106,7 +107,7 @@ func Test_GetSnapshotsEmptySnapshot(t *testing.T) {
    defer os.RemoveAll(path)
    wr, err := newZipWriter(path)
    assert.NoError(t, err)
    f, err := os.Open("testdata/emptysnapshot.pb.json")
    f, err := os.Open(filepath.Join("testdata", "emptysnapshot.pb.json"))
    reader := bufio.NewReader(f)

    assert.NoError(t, err)

@@ -132,7 +133,7 @@ func Test_GetSnapshotsEmptySnapshot(t *testing.T) {
func Test_GetSnapshotsFailedToGetSnapshotForTwoFiles(t *testing.T) {
    p := &Pb{}

    paths := []string{"testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb", "testdata/test.pb"}
    paths := []string{filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"), filepath.Join("testdata", "test.pb")}
    // ALL_OR_NOTHING mode
    res, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{

@@ -165,7 +166,7 @@ func Test_GetSnapshotsFailedToGetSnapshotForTwoFiles(t *testing.T) {
func Test_GetSnapshotsWithoutRootCollection(t *testing.T) {
    p := &Pb{}

    path := "testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"
    path := filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb")
    res, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
            Path: []string{path},

@@ -188,13 +189,13 @@ func Test_GetSnapshotsSkipFileWithoutExtension(t *testing.T) {
    wr, err := newZipWriter(path)
    assert.NoError(t, err)

    f, err := os.Open("testdata/bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb")
    f, err := os.Open(filepath.Join("testdata", "bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb"))
    assert.NoError(t, err)
    reader := bufio.NewReader(f)

    assert.NoError(t, wr.WriteFile("bafyreig5sd7mlmhindapjuvzc4gnetdbszztb755sa7nflojkljmu56mmi.pb", reader))

    f, err = os.Open("testdata/test")
    f, err = os.Open(filepath.Join("testdata", "test"))
    assert.NoError(t, err)
    reader = bufio.NewReader(f)

@@ -220,6 +221,44 @@ func Test_GetSnapshotsSkipFileWithoutExtension(t *testing.T) {
    assert.Contains(t, res.Snapshots[1].FileName, rootCollectionName)
}

func TestPb_GetSnapshots(t *testing.T) {
    t.Run("no objects in dir", func(t *testing.T) {
        // given
        dir := t.TempDir()
        p := &Pb{}

        // when
        _, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
                Path: []string{dir},
            }},
        }, process.NewProgress(pb.ModelProcess_Import))

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInDirectory))
    })
    t.Run("no objects in archive", func(t *testing.T) {
        // given
        dir := t.TempDir()
        p := &Pb{}
        zipPath := filepath.Join(dir, "empty.zip")
        err := test.CreateEmptyZip(t, zipPath)
        assert.Nil(t, err)

        // when
        _, ce := p.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfPbParams{PbParams: &pb.RpcObjectImportRequestPbParams{
                Path: []string{zipPath},
            }},
        }, process.NewProgress(pb.ModelProcess_Import))

        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInZipArchive))
    })
}

func newZipWriter(path string) (*zipWriter, error) {
    filename := filepath.Join(path, "Anytype"+strconv.FormatInt(rand.Int63(), 10)+".zip")
    f, err := os.Create(filename)
@@ -2,7 +2,6 @@ package txt

import (
    "context"
    "fmt"
    "io"
    "path/filepath"

@@ -137,7 +136,7 @@ func (t *TXT) handleImportPath(p string, pathsCount int, allErrors *common.Conve
        return true
    })
    if iterateErr != nil {
        allErrors.Add(fmt.Errorf("%w: %s", common.ErrFileImportSourceFileOpenError, iterateErr.Error()))
        allErrors.Add(iterateErr)
    }
    return snapshots, targetObjects
}
@@ -3,11 +3,13 @@ package txt
import (
    "context"
    "errors"
    "path/filepath"
    "testing"

    "github.com/stretchr/testify/assert"

    "github.com/anyproto/anytype-heart/core/block/import/common"
    "github.com/anyproto/anytype-heart/core/block/import/common/test"
    "github.com/anyproto/anytype-heart/core/block/process"
    "github.com/anyproto/anytype-heart/pb"
    "github.com/anyproto/anytype-heart/pkg/lib/bundle"

@@ -16,40 +18,82 @@ import (
)

func TestTXT_GetSnapshots(t *testing.T) {
    h := &TXT{}
    p := process.NewProgress(pb.ModelProcess_Import)
    sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
        Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
            TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{"testdata/test.txt", "testdata/test"}},
        },
        Type: 4,
        Mode: 1,
    }, p)
    t.Run("success", func(t *testing.T) {
        h := &TXT{}
        p := process.NewProgress(pb.ModelProcess_Import)
        sn, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
                TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{filepath.Join("testdata", "test.txt"), filepath.Join("testdata", "test")}},
            },
            Type: 4,
            Mode: 1,
        }, p)

    assert.NotNil(t, err)
    assert.True(t, errors.Is(err.GetResultError(model.Import_Txt), common.ErrFileImportNoObjectsInDirectory))
    assert.NotNil(t, sn)
    assert.Len(t, sn.Snapshots, 2)
    assert.Contains(t, sn.Snapshots[0].FileName, "test.txt")
    assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
    assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))
        assert.NotNil(t, err)
        assert.True(t, errors.Is(err.GetResultError(model.Import_Txt), common.ErrFileImportNoObjectsInDirectory))
        assert.NotNil(t, sn)
        assert.Len(t, sn.Snapshots, 2)
        assert.Contains(t, sn.Snapshots[0].FileName, "test.txt")
        assert.NotEmpty(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"])
        assert.Equal(t, sn.Snapshots[0].Snapshot.Data.Details.Fields["name"], pbtypes.String("test"))

    assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
    assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
    assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())
        assert.Contains(t, sn.Snapshots[1].FileName, rootCollectionName)
        assert.NotEmpty(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes)
        assert.Equal(t, sn.Snapshots[1].Snapshot.Data.ObjectTypes[0], bundle.TypeKeyCollection.String())

    var (
        found bool
        text string
    )
        var (
            found bool
            text string
        )

    for _, block := range sn.Snapshots[0].Snapshot.Data.GetBlocks() {
        if t, ok := block.Content.(*model.BlockContentOfText); ok {
            found = ok
            text = t.Text.GetText()
        for _, block := range sn.Snapshots[0].Snapshot.Data.GetBlocks() {
            if t, ok := block.Content.(*model.BlockContentOfText); ok {
                found = ok
                text = t.Text.GetText()
            }
        }
    }

    assert.Equal(t, text, "test")
    assert.True(t, found)
        assert.Equal(t, text, "test")
        assert.True(t, found)
    })
    t.Run("no objects in dir", func(t *testing.T) {
        // given
        dir := t.TempDir()
        h := &TXT{}
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, err := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
                TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{dir}},
            },
            Type: 4,
            Mode: 1,
        }, p)
        // then
        assert.NotNil(t, err)
        assert.True(t, errors.Is(err.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInDirectory))
    })
    t.Run("no objects in archive", func(t *testing.T) {
        // given
        dir := t.TempDir()
        zipPath := filepath.Join(dir, "empty.zip")
        err := test.CreateEmptyZip(t, zipPath)
        assert.Nil(t, err)

        h := &TXT{}
        p := process.NewProgress(pb.ModelProcess_Import)

        // when
        _, ce := h.GetSnapshots(context.Background(), &pb.RpcObjectImportRequest{
            Params: &pb.RpcObjectImportRequestParamsOfTxtParams{
                TxtParams: &pb.RpcObjectImportRequestTxtParams{Path: []string{zipPath}},
            },
            Type: 4,
            Mode: 1,
        }, p)
        // then
        assert.NotNil(t, ce)
        assert.True(t, errors.Is(ce.GetResultError(model.Import_Pb), common.ErrFileImportNoObjectsInZipArchive))
    })
}
@@ -24,4 +24,3 @@

### CSV import specific codes
1. CSV_LIMIT_OF_ROWS_OR_RELATIONS_EXCEEDED - user tried to import CSV file, where amount of rows or columns exceeded 1000
2. CSV_WRONG_CSV_STRUCTURE - there was error with parsing CSV file
@@ -29277,7 +29277,6 @@ stored |
| HTML_WRONG_HTML_STRUCTURE | 10 | |
| PB_NOT_ANYBLOCK_FORMAT | 11 | |
| CSV_LIMIT_OF_ROWS_OR_RELATIONS_EXCEEDED | 7 | |
| CSV_WRONG_CSV_STRUCTURE | 16 | |
| INSUFFICIENT_PERMISSIONS | 9 | |
(File diff suppressed because it is too large.)

@@ -1140,7 +1140,6 @@ message Import {
    PB_NOT_ANYBLOCK_FORMAT = 11;

    CSV_LIMIT_OF_ROWS_OR_RELATIONS_EXCEEDED = 7;
    CSV_WRONG_CSV_STRUCTURE = 16;

    INSUFFICIENT_PERMISSIONS = 9;
}