1

所以我调用了这个方法,它在微软为 documentdb 提供的示例代码中,但是在尝试创建新的存储过程时我得到了一个空响应。

private static async Task RunBulkImport(string collectionLink)
    {
        // Imports every *.json file under <app base>\Data into the collection
        // in batches, via the server-side "BulkImport" stored procedure, then
        // validates by counting the documents in the collection feed.
        string inputDirectory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Data");
        string inputFileMask = "*.json";
        int maxFiles = 2000;        // hard cap on the number of files imported
        int maxScriptSize = 50000;  // approximate character budget per batch

        // 1. Get the files.
        string[] fileNames = Directory.GetFiles(inputDirectory, inputFileMask);

        // 2. Prepare for import.
        int currentCount = 0;
        int fileCount = maxFiles != 0 ? Math.Min(maxFiles, fileNames.Length) : fileNames.Length;

        // 3. (Re)create the stored procedure from the local script file.
        string procedurePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"SP\BulkImport.js");
        StoredProcedure sproc = new StoredProcedure
        {
            Id = "BulkImport",
            Body = File.ReadAllText(procedurePath)
        };

        await TryDeleteStoredProcedure(collectionLink, sproc.Id);
        sproc = await client.CreateStoredProcedureAsync(collectionLink, sproc);

        while (currentCount < fileCount)
        {
            // 5. Create args for current batch.
            //    Note that we could send a string with serialized JSON and JSON.parse it on the script side,
            //    but that would cause script to run longer. Since script has timeout, unload the script as much
            //    as we can and do the parsing by client and framework. The script will get JavaScript objects.
            string argsJson = CreateBulkInsertScriptArguments(fileNames, currentCount, fileCount, maxScriptSize);
            var args = new dynamic[] { JsonConvert.DeserializeObject<dynamic>(argsJson) };

            // 6. Execute the batch.
            StoredProcedureResponse<int> scriptResult = await client.ExecuteStoredProcedureAsync<int>(
                sproc.SelfLink,
                new RequestOptions { PartitionKey = new PartitionKey("Andersen") },
                args);

            // 7. Prepare for next batch.
            int currentlyInserted = scriptResult.Response;
            if (currentlyInserted <= 0)
            {
                // BUG FIX: the original looped forever when the script reported
                // zero inserts (e.g. the first createDocument was not accepted).
                // Fail fast instead of spinning against the service.
                throw new InvalidOperationException(
                    string.Format("Bulk import made no progress after {0} of {1} files.", currentCount, fileCount));
            }
            currentCount += currentlyInserted;
        }

        // 8. Validate by paging through the document feed and counting.
        int numDocs = 0;
        string continuation = string.Empty;
        do
        {
            // Read document feed and count the number of documents.
            FeedResponse<dynamic> response = await client.ReadDocumentFeedAsync(collectionLink, new FeedOptions { RequestContinuation = continuation });
            numDocs += response.Count;

            // Get the continuation so that we know when to stop.
            continuation = response.ResponseContinuation;
        }
        while (!string.IsNullOrEmpty(continuation));

        Console.WriteLine("Found {0} documents in the collection. There were originally {1} files in the Data directory\r\n", numDocs, fileCount);
    }


    private static async Task TryDeleteStoredProcedure(string collectionLink, string sprocId)
    {
        // Look up the stored procedure by id and delete it when it exists, so
        // that a subsequent create does not fail with a conflict.
        var existing = client.CreateStoredProcedureQuery(collectionLink)
            .Where(s => s.Id == sprocId)
            .AsEnumerable()
            .FirstOrDefault();

        if (existing == null)
        {
            return;
        }

        await client.DeleteStoredProcedureAsync(existing.SelfLink);
    }

    /// <summary>
    /// Builds a JSON array string containing the contents of as many document
    /// files as fit within <paramref name="maxScriptSize"/> characters,
    /// starting at <paramref name="currentIndex"/>.
    /// </summary>
    /// <param name="docFileNames">Paths of the JSON document files.</param>
    /// <param name="currentIndex">Index of the first file to include.</param>
    /// <param name="maxCount">Exclusive upper bound on usable file indices.</param>
    /// <param name="maxScriptSize">Approximate character budget for one batch.</param>
    /// <returns>
    /// A JSON array string ("[doc, doc, ...]") containing at least one document,
    /// or an empty string when <paramref name="currentIndex"/> is out of range.
    /// </returns>
    private static string CreateBulkInsertScriptArguments(string[] docFileNames, int currentIndex, int maxCount, int maxScriptSize)
    {
        if (currentIndex >= maxCount) return string.Empty;

        var jsonDocumentArray = new StringBuilder();
        jsonDocumentArray.Append("[");

        // Always include the first document so the batch makes progress even
        // when a single file exceeds the budget.
        jsonDocumentArray.Append(File.ReadAllText(docFileNames[currentIndex]));

        int i = 1;
        while ((currentIndex + i) < maxCount)
        {
            string next = File.ReadAllText(docFileNames[currentIndex + i]);

            // BUG FIX: the original tested only the running length BEFORE
            // appending, so the final file could push the batch well past
            // maxScriptSize. Account for the candidate file (plus the ", "
            // separator) before deciding to append it.
            if (jsonDocumentArray.Length + next.Length + 2 >= maxScriptSize) break;

            jsonDocumentArray.Append(", ").Append(next);
            i++;
        }

        jsonDocumentArray.Append("]");
        return jsonDocumentArray.ToString();
    }

这是 BulkImport.js 文件

function bulkImport(docs) {
    // Server-side stored procedure: inserts the documents in `docs` one at a
    // time and reports, via the response body, how many were created before
    // the request stopped being accepted (resource/time limits). The client
    // uses that count to resume the next batch from the right position.
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();

    // count is used as the index of the next document to insert.
    var count = 0;

    // Validate input.
    if (!docs) throw new Error("The array is undefined or null.");

    var docsLength = docs.length;
    if (docsLength == 0) {
        getContext().getResponse().setBody(0);
        // BUG FIX: the original fell through and called tryCreate(docs[0],
        // callback) with an undefined document; stop here for an empty array.
        return;
    }

    // CRUD API to create a document; continues via callback until done.
    tryCreate(docs[count], callback);

    function tryCreate(doc, callback) {
        var options = {
            disableAutomaticIdGeneration: true
        };

        // createDocument returns false when the request is not accepted
        // (e.g. the procedure is near its limits); report how many documents
        // made it so the client can resume from there.
        var isAccepted = collection.createDocument(collectionLink, doc, options, callback);

        if (!isAccepted) getContext().getResponse().setBody(count);
    }

    function callback(err, doc, options) {
        if (err) throw err;
        count++;

        if (count >= docsLength) {
            // All documents inserted: report the total.
            getContext().getResponse().setBody(count);
        } else {
            tryCreate(docs[count], callback);
        }
    }
}

在数据文件夹中,我有 100 个 json 文件,这些文件在示例本身中提供。请帮助我创建我正在使用 documentdb 模拟器的新程序。

4

1 回答 1

0

我可以使用 documentdb 模拟器创建新的存储过程。据我所知,目前不支持在 DocumentDB Emulator 的资源管理器(Explorer)中直接执行存储过程。以下是我的详细步骤:

1.下载documentdb-emulator并安装在本机

2. 从 GitHub 下载 DocumentDB 演示示例代码。

3.我们可以从文档中获取本地documentdb账户主密钥,并将其添加到项目中

DocumentDB Emulator 仅支持单个固定帐户和众所周知的主密钥。在 DocumentDB 模拟器中无法重新生成密钥

// Connect to the DocumentDB Emulator running locally

   DocumentClient client = new DocumentClient(new Uri("https://localhost:8081"),"C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==");

在此处输入图像描述

4.打开documentdb模拟器创建db和collection

在此处输入图像描述

在此处输入图像描述

5. 执行演示代码,并在 Azure DocumentDB Emulator 的资源管理器(Explorer)中检查结果。

在此处输入图像描述

在此处输入图像描述

注意:根据屏幕截图,我们可以发现使用 azure documentdb emulator 选择的过程选项卡上没有输入或结果字段。

如果我们使用 azure documentdb,那么我们可以使用Azure 门户中的 Azure documentdb 执行过程。

在此处输入图像描述

如果我们对 DocumentDB 模拟器有任何问题,可以通过单击 DocumentDB Emulator 资源管理器(Explorer)上的反馈按钮向 Azure DocumentDB 团队提供反馈。

在此处输入图像描述

于 2017-03-30T03:14:59.310 回答