Import a CSV into Cosmos DB via an Azure Function

If you want to automatically import a CSV into Cosmos DB to update data, an Azure Function is your best option.

My script is written in Python. First, create a `function.json` to define the trigger:

{
  "scriptFile": "__init__.py",
  "bindings": [
    {
      "name": "myblob",
      "type": "blobTrigger",
      "direction": "in",
      "path": "transferin/Erweiterung_testdaten.csv",
      "connection": "sausstoraget_STORAGE"
    }
  ]
}

import logging
import azure.functions as func
import os, io
import pandas as pd

import json    

import azure.cosmos.cosmos_client as cosmos_client
import azure.cosmos.errors as errors

# Cosmos DB connection settings, supplied via the Function App's
# application settings (environment variables). A missing variable
# raises KeyError at import time, which fails fast and surfaces the
# misconfiguration in the Function host logs.
config = {
    'ENDPOINT': os.environ['ENDPOINT'],        # Cosmos DB account URI
    'PRIMARYKEY': os.environ['PRIMARYKEY'],    # account master key
    'DBLink': os.environ['DBLink'],            # collection link, e.g. dbs/<db>/colls/<coll>
}

def main(myblob: func.InputStream):
    """Blob-trigger entry point: import a semicolon-separated CSV into Cosmos DB.

    Each CSV row becomes one document; the row's CONTRACT_ID column is used
    as the document ``id``. If a document with that id already exists
    (HTTP 409 Conflict), the existing document is replaced instead.

    :param myblob: the uploaded blob (the CSV file) provided by the trigger.
    """
    logging.info("Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    # Read the whole blob into memory as bytes.
    file_bytes = myblob.read()
    logging.info(type(file_bytes))

    # dtype=str keeps leading zeros / identifiers intact instead of
    # letting pandas coerce columns to numeric types.
    df = pd.read_csv(io.BytesIO(file_bytes), sep=';', dtype=str)
    logging.info(df)

    # Round-trip through JSON to get a list of plain dicts, one per CSV row.
    results = json.loads(df.to_json(orient='records'))
    logging.info(len(results))

    client = cosmos_client.CosmosClient(
        url_connection=config['ENDPOINT'],
        auth={'masterKey': config['PRIMARYKEY']},
    )

    for item in results:
        logging.info("Import")
        # Use the contract id as the Cosmos document id so re-imports
        # target the same document. Assumes the CSV has a CONTRACT_ID
        # column — a missing column raises KeyError here.
        item['id'] = item['CONTRACT_ID']
        logging.info(json.dumps(item, indent=2))
        try:
            client.CreateItem(config['DBLink'], item)
        except errors.HTTPFailure as e:
            # 409 Conflict: a document with this id already exists,
            # so replace it. Any other HTTP failure propagates.
            if e.status_code == 409:
                query = {'query': 'SELECT * FROM c WHERE c.id = "%s"' % item['id']}
                options = {}
                docs = client.QueryItems(config['DBLink'], query, options)
                doc = list(docs)[0]

                # Get the document link from attribute `_self`
                doc_link = doc['_self']
                client.ReplaceItem(doc_link, item)
            else:
                raise



You can download the files from here.

Leave a Reply