Avatar
🏃
24 results for Python
  • J’ai voulu faire un premier programme afin de découvrir l’API Elasticsearch, comme base d’information j’ai pris mes emails. C’est assez simple, toutes les personnes sous MacOS ont des emails …

    Voici donc le petit programme en Python (pour Michel) : il suffit de changer MonUser.

    #!/usr/bin/env python3
    
    import email
    import plistlib
    import re
    import glob, os
    from datetime import datetime
    from email.utils import parsedate_to_datetime
    from email.header import Header, decode_header, make_header
    from elasticsearch import Elasticsearch 
    
    class Emlx(object):
            def __init__(self):
                super(Emlx, self).__init__()
                self.bytecount = 0
                self.msg_data = None
                self.msg_plist = None
    
            def parse(self, filename_path):
                with open(filename_path, "rb") as f:
                    self.bytecount = int(f.readline().strip())
                    self.msg_data = email.message_from_bytes(f.read(self.bytecount))
                    self.msg_plist = plistlib.loads(f.read())
                return self.msg_data, self.msg_plist
    
    if __name__ == '__main__':
       msg = Emlx()
       nb_parse = 0
       path_mail = "/Users/MonUser/Library/Mail/V6/"
       es_keys = "mail"
       es=Elasticsearch([{'host':'localhost','port':9200}])
       for root, dirs, files in os.walk(path_mail):
          for file in files:
              if file.endswith(".emlx"):
                 file_full = os.path.join(root, file)
                 message, plist = msg.parse(file_full)
                 statinfo = os.stat(file_full)
                 my_date = message['Date']
                 my_id = message['Message-ID']
                 my_server = message['Received']
                 if my_date is not None and my_date is not Header:
                     my_date_str = datetime.fromtimestamp(parsedate_to_datetime(my_date).timestamp()).strftime('%Y-%m-%dT%H:%M:%S')
                 my_email = str(message['From'])
                 if my_email is not None:
                     my_domain = re.search("@[\w.\-\_]+", str(my_email))
                 if my_email is not None:
                     my_name = re.search("[\w.\-\_]+@", str(my_email))
                 if my_domain is not None:
                     #print(my_domain.group())
                     #print(my_name.group())
                     json = '{"name":"'+my_name.group()+'","domain":"'+my_domain.group()+'"'
                 else:
                     my_email = my_email.replace(",","")
                     my_email = my_email.replace('"','')
                     json = '{"name":"'+my_email+'","domain":"None"';
                 if my_date is not None:
                     json = json+',"date":"'+my_date_str+'","size":'+str(statinfo.st_size)+',"id":'+str(nb_parse)
                 else:
                     json = json+',"size":'+str(statinfo.st_size)+',"id":'+str(nb_parse)
                 if my_server is not None and my_server is not Header:
                     ip = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(my_server))
                     if ip is not None:
                        my_ip = ip.group()
                        json = json+',"ip":"'+str(my_ip)+'"'
                     else:
                        my_ip = ""
                     #ip = re.findall(r'\b25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\.25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\.25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\.25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\b',my_server)
                     #ip = re.findall( r'[0-9]+(?:\.[0-9]+){1,3}', my_server )
                     #ip = re.findall(r'[\d.-]+', my_server) 
                 else:
                     json = json
                 if my_id is not None and my_id is not Header:
                     my_id =my_id.strip()
                     my_id =my_id.strip('\n')
                     json = json+',"Message-ID":"'+my_id+'","file":"'+file+'"}'
                 else:
                     json = json+',"file":"'+file+'"}'
                 print(json)
                 res = es.index(index=es_keys,doc_type='emlx',id=nb_parse,body=json)
                 nb_parse += 1
                 #print(plist)
       print(nb_parse)

    Le but de ce programme c’est simplement de mieux comprendre l’API. Pour le lancer j’ai fait :

    python Created Sun, 24 Nov 2019 00:00:00 +0000
  • J’ai finalement fait une version V2 qui corrige quelques problèmes (Exception de quelques emails our quelques noms de domaines). Il faut mettre son MonLogin .

    #!/usr/bin/env python3
    
    import email
    import plistlib
    import re
    import glob, os
    import string
    from datetime import datetime
    from email.utils import parsedate_to_datetime
    from email.header import Header, decode_header, make_header
    from elasticsearch import Elasticsearch 
    
    class Emlx(object):
            def __init__(self):
                super(Emlx, self).__init__()
                self.bytecount = 0
                self.msg_data = None
                self.msg_plist = None
    
            def parse(self, filename_path):
                with open(filename_path, "rb") as f:
                    self.bytecount = int(f.readline().strip())
                    self.msg_data = email.message_from_bytes(f.read(self.bytecount))
                    self.msg_plist = plistlib.loads(f.read())
                return self.msg_data, self.msg_plist
    
    if __name__ == '__main__':
       msg = Emlx()
       nb_parse = 0
       nb_error = 0
       printable = set(string.printable)
       path_mail = "/Users/MonLogin/Library/Mail/V6/"
       es_keys = "mail"
       es=Elasticsearch([{'host':'localhost','port':9200}])
       for root, dirs, files in os.walk(path_mail):
          for file in files:
              if file.endswith(".emlx"):
                 file_full = os.path.join(root, file)
                 message, plist = msg.parse(file_full)
                 statinfo = os.stat(file_full)
                 my_date = message['Date']
                 my_id = message['Message-ID']
                 my_server = message['Received']
                 my_date_str = ""
                 if my_date is not None and my_date is not Header:
                     try:
                       my_date_str = datetime.fromtimestamp(parsedate_to_datetime(my_date).timestamp()).strftime('%Y-%m-%dT%H:%M:%S')
                     except :
                       my_date_str = ""
                 my_email = str(message['From'])
                 if my_email is not None:
                     my_domain = re.search("@[\w.\-\_]+", str(my_email))
                     if my_domain is not None:
                          my_domain_str = str(my_domain.group ());
                          my_domain_str = my_domain_str.lower()
                 if my_email is not None:
                     my_name = re.search("[\w.\-\_]+@", str(my_email))
                     if my_name is not None:
                          my_name_str = str(my_name.group ());
                          my_name_str = my_name_str.lower()
                 if my_domain is not None:
                     #print(my_domain.group())
                     #print(my_name.group())
                     json = '{"name":"'+my_name_str+'","domain":"'+my_domain_str+'"'
                 else:
                     my_email = my_email.replace(",","")
                     my_email = my_email.replace('"','')
                     my_email = str(re.sub(r'[^\x00-\x7f]',r'', my_email)) 
                     my_email = my_email.lower()
                     json = '{"name":"'+my_email+'","domain":"None"';
                 if my_date is not None and len(my_date_str) > 1:
                     json = json+',"date":"'+my_date_str+'","size":'+str(statinfo.st_size)+',"id":'+str(nb_parse)
                 else:
                     json = json+',"size":'+str(statinfo.st_size)+',"id":'+str(nb_parse)
                 if my_server is not None and my_server is not Header:
                     ip = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(my_server))
                     if ip is not None:
                        my_ip = ip.group()
                        json = json+',"ip":"'+str(my_ip)+'"'
                     else:
                        my_ip = ""
                     #ip = re.findall(r'\b25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\.25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\.25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\.25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?\b',my_server)
                     #ip = re.findall( r'[0-9]+(?:\.[0-9]+){1,3}', my_server )
                     #ip = re.findall(r'[\d.-]+', my_server) 
                 else:
                     json = json
                 if my_id is not None and my_id is not Header:
                     try:
                        my_id =my_id.strip()
                        my_id =my_id.strip('\n')
                        json = json+',"Message-ID":"'+my_id+'","file":"'+file+'"}'
                     except:
                        json = json+',"file":"'+file+'"}'
                 else:
                     json = json+',"file":"'+file+'"}'
                 print(json)
                 try:
                    res = es.index(index=es_keys,doc_type='emlx',id=nb_parse,body=json)
                 except:
                    nb_error += 1   
                 nb_parse += 1
                 #print(plist)
       print(nb_parse)

     

    kibana python Created Sun, 24 Nov 2019 00:00:00 +0000
  • En quelques étapes …

    Etape 1: Voir un tweet passer de @ollybret :

    Etape 2 : Vérifier l’information sur @datagouvfr : le lien étant : https://www.data.gouv.fr/fr/datasets/demandes-de-valeurs-foncieres/ 

    Et se faire un profil :

    Etape 3 : Voir du JSON : Micro-API DVF (Demande de Valeurs Foncières) et une API :

    Voir même le résultat en fonction de la commune ou du code postal :

    Superbe travail de Christian Quest ( sur Twitter @cq94 ‏) avec en plus la mise du code sur GitHub : https://github.com/cquest/dvf_as_api .( @github )

    inondation opendata python Created Sun, 28 Apr 2019 00:00:00 +0000
  • Awesome Note 2, it’s very popular on iPad :

    The new All-in-one Organizer, Awesome Note 2 is integrated with note and schedule management.
    And now it’s available!!

    WONDERFUL WRITING FEATURES
    · It can be used not only for simple notes, but also rich and wonderful writing tool.
    · Make notes even more powerful to add photos, voice recording and drawings.
    · Easily create diary notes to display feeling, weather or road map information.

    joplin migration python Created Thu, 14 Feb 2019 00:00:00 +0000
  • Step 0 : Install Joplin and activate the REST API ( https://joplin.cozic.net/api/ ) .

    Step 1: Install gmplot with pip

    $ pip install gmplot
    Collecting gmplot
      Downloading https://files.pythonhosted.org/packages/e2/b1/e1429c31a40b3ef5840c16f78b506d03be9f27e517d3870a6fd0b356bd46/gmplot-1.2.0.tar.gz (115kB)
        100% |████████████████████████████████| 122kB 1.0MB/s 
    Requirement already satisfied: requests in /usr/local/lib/python3.7/site-packages (from gmplot) (2.21.0)
    Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.7/site-packages (from requests->gmplot) (1.24.1)
    Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/site-packages (from requests->gmplot) (2018.11.29)
    Requirement already satisfied: idna<2.9,>=2.5 in /usr/local/lib/python3.7/site-packages (from requests->gmplot) (2.8)
    Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.7/site-packages (from requests->gmplot) (3.0.4)
    Building wheels for collected packages: gmplot
      Building wheel for gmplot (setup.py) ... done
      Stored in directory: /Users/...../Library/Caches/pip/wheels/81/6a/76/4dd6a7cc310ba765894159ee84871e8cd55221d82ef14b81a1
    Successfully built gmplot
    Installing collected packages: gmplot
    Successfully installed gmplot-1.2.0

    The source code : (change your token)

    joplin python Created Wed, 13 Feb 2019 00:00:00 +0000
  • Step 0 : Install Joplin and activate the REST API ( https://joplin.cozic.net/api/ ) .

    Step 1: Install staticmap with pip ( for more information see https://github.com/komoot/staticmap )

    $ pip install staticmap
    Collecting staticmap
      Downloading https://files.pythonhosted.org/packages/f9/9f/5a3843533eab037cba031486175c4db1b214614404a29516208ff228dead/staticmap-0.5.4.tar.gz
    Collecting Pillow (from staticmap)
      Downloading https://files.pythonhosted.org/packages/c9/ed/27cc92e99b9ccaa0985a66133baeea7e8a3371d3c04cfa353aaa3b81aac1/Pillow-5.4.1-cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl (3.7MB)
        100% |████████████████████████████████| 3.7MB 6.3MB/s 
    Requirement already satisfied: requests in /usr/local/lib/python3.7/site-packages (from staticmap) (2.21.0)
    Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.7/site-packages (from requests->staticmap) (3.0.4)
    Requirement already satisfied: idna<2.9,>=2.5 in /usr/local/lib/python3.7/site-packages (from requests->staticmap) (2.8)
    Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.7/site-packages (from requests->staticmap) (1.24.1)
    Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/site-packages (from requests->staticmap) (2018.11.29)
    Building wheels for collected packages: staticmap
      Building wheel for staticmap (setup.py) ... done
      Stored in directory: /Users/..../Library/Caches/pip/wheels/fe/a6/a5/2acceb72471d85bd0498973aabd611e6ff1cdd48796790f047
    Successfully built staticmap
    Installing collected packages: Pillow, staticmap
    Successfully installed Pillow-5.4.1 staticmap-0.5.4

    The source code :

    joplin maps python rest-api Created Wed, 13 Feb 2019 00:00:00 +0000
  • Install JOPLIN : https://joplin.cozic.net ,  and start REST API.

    Step 1 : Download all with https://takeout.google.com

    Step 2 : Uncompress and put all on same folder.

    Step 3 : Put this script in folder.

    Step 4 : Edit the script and put your token

    The script :

    #
    # Version 1 
    # for Python 3
    # 
    #   ARIAS Frederic
    #   Sorry ... It's difficult for me the python :)
    #
    
    from os import listdir
    from pathlib import Path
    import glob
    import csv
    import locale
    import os
    import time
    from datetime import datetime
    import json
    import requests
    
    nb_metadata = 0
    nb_metadata_import = 0
    def month_string_to_number(string):
        m = {
            'janv.': 1,
            'feb.': 2,
            'févr.': 2,
            'mar.': 3,
            'mars': 3,
            'apr.':4,
            'avr.':4,
             'may.':5,
             'mai':5,
             'juin':6,
             'juil.':7,
             'aug.':8,
             'août':8,
             'sept.':9,
             'oct.':10,
             'nov.':11,
             'déc.':12
            }
        s = string.strip()[:5].lower()
    
        try:
            out = m[s]
            return out
        except:
            raise ValueError('Not a month')
    
    locale.setlocale(locale.LC_TIME, 'fr_FR.UTF-8')
    #today = datetime.date.today()
    #print(today.strftime('The date :%d %b. %Y à %H:%M:%S UTC'))
    from time import strftime,localtime
    print(localtime())
    print(strftime("%H:%M:%S, %d %b. %Y",localtime()))
    date = datetime.strptime('2017-05-04',"%Y-%m-%d")
    
    #Token
    ip = "127.0.0.1"
    port = "41184"
    token = "Put your token here"
    
    nb_import = 0;
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    
    url_notes = (
        "http://"+ip+":"+port+"/notes?"
        "token="+token
    )
    url_folders = (
        "http://"+ip+":"+port+"/folders?"
        "token="+token
    )
    url_tags = (
        "http://"+ip+":"+port+"/tags?"
        "token="+token
    )
    url_ressources = (
        "http://"+ip+":"+port+"/ressources?"
        "token="+token
    )
    
    #Init
    GooglePlus_UID = "12345678901234567801234567890123"
    UID = {}
    
    payload = {
        "id":GooglePlus_UID,
        "title":"GooglePlus Import"
    }
    
    try:
        resp = requests.post(url_folders, data=json.dumps(payload, separators=(',',':')), headers=headers)
        resp.raise_for_status()
        resp_dict = resp.json()
        print(resp_dict)
        print("My ID")
        print(resp_dict['id'])
        GooglePlus_UID_real = resp_dict['id']
        save = str(resp_dict['id'])
        UID[GooglePlus_UID]= save
    except requests.exceptions.HTTPError as e:
        print("Bad HTTP status code:", e)
    except requests.exceptions.RequestException as e:
        print("Network error:", e)
    
    for csvfilename in glob.iglob('Takeout*/**/*.metadata.csv', recursive=True):
      nb_metadata += 1
      print(nb_metadata," ",csvfilename)
      #print("Picture:"+os.path.basename(csvfilename))
      mybasename = os.path.basename(csvfilename)
      mylist = mybasename.split(".")
      myfilename = mylist[0] + "." + mylist[1]
      filename = os.path.dirname(csvfilename)+"/"+myfilename
      my_file = Path(filename)
      with open(csvfilename) as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            if (len(row['description']) > 0):
                print(row['title'], row['description'], row['creation_time.formatted'], row['geo_data.latitude'], row['geo_data.longitude'])
                #date = datetime.strptime(row['creation_time.formatted'], "%d %b %Y à %H:%M:%S %Z").timetuple()
                #print(date)
                mylist2 = row['creation_time.formatted'].split(" ");
                mylist3 = mylist2[4].split(":");
                date = date.replace(hour=int(mylist3[0]), year=int(mylist2[2]), month=month_string_to_number(mylist2[1]), day=int(mylist2[0]))
                timestamp = time.mktime(date.timetuple())*1000
                print(timestamp)
                nb_metadata_import += 1
                mybody = row['description']
                if (len(row['geo_data.latitude']) > 2):
                  payload_note = {
                    "parent_id":GooglePlus_UID_real,
                    "title":row['creation_time.formatted'],
                    "source":myfilename,
                    "source_url":row['url'],
                    "order":nb_metadata_import,
                    "body":mybody
                    }
                  payload_note_put = {
                    "latitude":float(row['geo_data.latitude']),
                    "longitude":float(row['geo_data.longitude']),
                    "source":myfilename,
                    "source_url":row['url'],
                    "order":nb_metadata_import,
                    "user_created_time":timestamp,
                    "user_updated_time":timestamp,
                    "author":"Google+"
                    }
                else:
                   payload_note = {
                    "parent_id":GooglePlus_UID_real,
                    "title":row['creation_time.formatted'],
                    "source":myfilename,
                    "source_url":row['url'],
                    "order":nb_metadata_import,
                    "user_created_time":timestamp,
                    "user_updated_time":timestamp,
                    "author":"Google+",
                    "body":mybody
                    }
                   payload_note_put = {
                    "source":myfilename,
                    "order":nb_metadata_import,
                    "source_url":row['url'],
                    "user_created_time":timestamp,
                    "user_updated_time":timestamp,
                    "author":"Google+"
                    }
    
                try:
                    resp = requests.post(url_notes, json=payload_note)
                    resp.raise_for_status()
                    resp_dict = resp.json()
                    print(resp_dict)
                    print(resp_dict['id'])
                    myuid= resp_dict['id']
                except requests.exceptions.HTTPError as e:
                    print("Bad HTTP status code:", e)
                except requests.exceptions.RequestException as e:
                    print("Network error:", e)
    
                url_notes_put = (
        "http://"+ip+":"+port+"/notes/"+myuid+"?"
        "token="+token
    )
    
                try:
                    resp = requests.put(url_notes_put, json=payload_note_put)
                    resp.raise_for_status()
                    resp_dict = resp.json()
                    print(resp_dict)
                except requests.exceptions.HTTPError as e:
                    print("Bad HTTP status code:", e)
                except requests.exceptions.RequestException as e:
                    print("Network error:", e)
                
                if my_file.is_file():
                   cmd = "curl -F 'data=@"+filename+"' -F 'props={\"title\":\""+myfilename+"\"}' http://"+ip+":"+port+"/resources?token="+token
                   print("Command"+cmd)
                   resp = os.popen(cmd).read()
                   try:
                      respj = json.loads(resp)
                      print(respj['id'])
                      myuid_picture= respj['id']
                   except:
                      print('bad json: ', resp)
    
                   mybody = row['description'] + "\n  ![" + myfilename + "](:/" + myuid_picture + ")   \n";
    
                   payload_note_put = {
                    "body":mybody
                    }
    
                   try:
                      resp = requests.put(url_notes_put, json=payload_note_put)
                      resp.raise_for_status()
                      resp_dict = resp.json()
                      print(resp_dict)
                   except requests.exceptions.HTTPError as e:
                      print("Bad HTTP status code:", e)
                   except requests.exceptions.RequestException as e:
                      print("Network error:", e)
    
    print(nb_metadata)
    print(nb_metadata_import)
    joplin python Created Mon, 11 Feb 2019 00:00:00 +0000
  • Voici ce que j’ai fait pour faire l’installation de pip sur Mac OS :

    $ curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
      % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                     Dload  Upload   Total   Spent    Left  Speed
    100 1662k  100 1662k    0     0   560k      0  0:00:02  0:00:02 --:--:-- 560k
    
    $ python3 get-pip.py
    Collecting pip
      Downloading https://files.pythonhosted.org/packages/d7/41/34dd96bd33958e52cb4da2f1bf0818e396514fd4f4725a79199564cd0c20/pip-19.0.2-py2.py3-none-any.whl (1.4MB)
        100% |████████████████████████████████| 1.4MB 154kB/s 
    Installing collected packages: pip
      Found existing installation: pip 18.1
        Uninstalling pip-18.1:
          Successfully uninstalled pip-18.1
    Successfully installed pip-19.0.2
    
    $ pip install feedparser
    Collecting feedparser
      Downloading https://files.pythonhosted.org/packages/91/d8/7d37fec71ff7c9dbcdd80d2b48bcdd86d6af502156fc93846fb0102cb2c4/feedparser-5.2.1.tar.bz2 (192kB)
        100% |████████████████████████████████| 194kB 500kB/s 
    Building wheels for collected packages: feedparser
      Building wheel for feedparser (setup.py) ... done
      Stored in directory: ....
    Successfully built feedparser
    Installing collected packages: feedparser
    Successfully installed feedparser-5.2.1
    pip python Created Mon, 11 Feb 2019 00:00:00 +0000
  • (See the finale release : https://www.cyber-neurones.org/2019/02/diaro-app-pixel-crater-ltd-diarobackup-xml-how-to-migrate-data-to-joplin/ )

    Je pensais avoir trouvé mon bug … je pensais que c’était l’espace avant la variable qui faisait que les valeurs n’était pas prise en compte.

    Avec : requests.post(url_folders, json=payload) on a un . c.a.d. un 20

    Avec : requests.post(url_folders, data=json.dumps(payload, separators=(’,’,’:’)), headers=headers)

    Sachant que : headers = {‘Content-type’: ‘application/json’, ‘Accept’: ’text/plain’}

    Je n’ai plus d’espace mais les valeurs numériques ne sont pas prise en compte …

    python Created Sat, 09 Feb 2019 00:00:00 +0000
  • (See the finale release : https://www.cyber-neurones.org/2019/02/diaro-app-pixel-crater-ltd-diarobackup-xml-how-to-migrate-data-to-joplin/ )

    I have issue with ressources (link between ressources and notes) …. error 404. The logs in : .config/joplin-desktop/log-clipper.txt

    ....: "Request: PUT /ressources/71dd2cba2af54c4ebb53fb7fd8d0543b/notes/cbbc6076b2ac321ccae1f036a2fe6659?token=...."
    ....: "Error: Not Found
    Error: Not Found
        at Api.route (/Applications/Joplin.app/Contents/Resources/app/lib/services/rest/Api.js:103:41)
        at execRequest (/Applications/Joplin.app/Contents/Resources/app/lib/ClipperServer.js:147:39)
        at IncomingMessage.request.on (/Applications/Joplin.app/Contents/Resources/app/lib/ClipperServer.js:185:8)
        at emitNone (events.js:105:13)
        at IncomingMessage.emit (events.js:207:7)
        at endReadableNT (_stream_readable.js:1045:12)
        at _combinedTickCallback (internal/process/next_tick.js:138:11)
        at process._tickCallback (internal/process/next_tick.js:180:9)"

    My last code :

    #
    # Version 2 
    # for Python 3
    # 
    #   ARIAS Frederic
    #   Sorry ... It's difficult for me the python :)
    #
    
    #from lxml import etree
    import xml.etree.ElementTree as etree
    from time import gmtime, strftime
    import time
    import json
    import requests
    import os
    
    strftime("%Y-%m-%d %H:%M:%S", gmtime())
    start = time.time()
    
    #Token
    ip = "127.0.0.1"
    port = "41184"
    token = "ABCD123ABCD123ABCD123ABCD123ABCD123"
    
    url_notes = (
        "http://"+ip+":"+port+"/notes?"
        "token="+token
    )
    url_folders = (
        "http://"+ip+":"+port+"/folders?"
        "token="+token
    )
    url_tags = (
        "http://"+ip+":"+port+"/tags?"
        "token="+token
    )
    url_ressources = (
        "http://"+ip+":"+port+"/ressources?"
        "token="+token
    )
    
    #Init
    Diaro_UID = "12345678901234567801234567890123"
    Lat = {}
    Lng = {}
    UID = {} 
    TAGS = {}
    Lat[""] = ""
    Lng[""] = ""
    
    payload = {
        "id": Diaro_UID,
        "title": "Diaro Import"
    }
    
    try:
        resp = requests.post(url_folders, json=payload)
        #time.sleep(1)
        resp.raise_for_status()
        resp_dict = resp.json()
        print(resp_dict)
        print("My ID")
        print(resp_dict['id'])
        Diaro_UID_real = resp_dict['id']
        save = str(resp_dict['id'])
        UID[Diaro_UID]= save
    except requests.exceptions.HTTPError as e:
        print("Bad HTTP status code:", e)
    except requests.exceptions.RequestException as e:
        print("Network error:", e)
    
    print("Start : Parse Table")
    tree = etree.parse("./DiaroBackup.xml")
    for table in tree.iter('table'):
        name = table.attrib.get('name')
        print(name)
        myorder = 1
        for r in table.iter('r'):
             myuid = ""
             mytitle = ""
             mylat = ""
             mylng = ""
             mytags = ""
             mydate = ""
             mydate_ms = 0;
             mytext = ""
             myfilename = ""
             myfolder_uid = Diaro_UID
             mylocation_uid = ""
             myprimary_photo_uid = ""
             myentry_uid = ""
             myorder += 1
             for subelem in r:
                 print(subelem.tag)
                 if (subelem.tag == 'uid'):
                     myuid = subelem.text
                     print ("myuid",myuid)
                 if (subelem.tag == 'entry_uid'):
                     myentry_uid = subelem.text
                     print ("myentry_uid",myentry_uid)
                 if (subelem.tag == 'primary_photo_uid'):
                     myprimary_photo_uid = subelem.text
                     print ("myprimary_photo_uid",myprimary_photo_uid)
                 if (subelem.tag == 'folder_uid'):
                     myfolder_uid = subelem.text
                     print ("myfolder_uid",myfolder_uid)
                 if (subelem.tag == 'location_uid'):
                     mylocation_uid = subelem.text
                     print ("mylocation_uid",mylocation_uid)
                 if (subelem.tag == 'date'):
                     mydate = subelem.text
                     mydate_ms = int(mydate)
                     print ("mydate",mydate," in ms",mydate_ms)
                 if (subelem.tag == 'title'):
                     mytitle = subelem.text
                     print ("mytitle",mytitle)
                     #if type(mytitle) == str:
                        #mytitle = mytitle.encode('utf8')
                 if (subelem.tag == 'lat'):
                     mylat = subelem.text
                     print ("mylat",mylat)
                 if (subelem.tag == 'lng'):
                     mylng = subelem.text
                     print ("mylng",mylng)
                 if (subelem.tag == 'tags'):
                     mytags = subelem.text
                     if mytags:
                        mytags[1:]
                     print ("mytags",mytags)
                 if (subelem.tag == 'text'):
                     mytext = subelem.text
                     print ("mytext",mytext)
                     #if type(mytext) == str:
                           #mytext = mytext.encode('utf8')
                 if (subelem.tag == 'filename'):
                     myfilename = subelem.text
                     print ("myfilename",myfilename)
                     
             if (name == 'diaro_folders'):
                payload_folder = {
      "id": myuid,
      "title": mytitle,
      "parent_id": Diaro_UID_real
    }
                print(payload_folder)
                try:
                    resp = requests.post(url_folders, json=payload_folder)
                    #time.sleep(1)
                    resp.raise_for_status()
                    resp_dict = resp.json()
                    print(resp_dict)
                    print(resp_dict['id'])
                    save = str(resp_dict['id']) 
                    UID[myuid]= save
                except requests.exceptions.HTTPError as e:
                    print("Bad HTTP status code:", e)
                except requests.exceptions.RequestException as e:
                    print("Network error:", e)
    
             if (name == 'diaro_tags'):
                payload_tags = {
                    "id": myuid,
                    "title": mytitle
                }
                try:
                    resp = requests.post(url_tags, json=payload_tags)
                    #time.sleep(1)
                    resp.raise_for_status()
                    resp_dict = resp.json()
                    print(resp_dict)
                    print(resp_dict['id'])
                    UID[myuid]= resp_dict['id']
                    TAGS[myuid] = mytitle
                except requests.exceptions.HTTPError as e:
                    print("Bad HTTP status code:", e)
                except requests.exceptions.RequestException as e:
                    print("Network error:", e)
    
             if (name == 'diaro_attachments'):
                payload_ressource = {
                    "id": myuid
                }
                filename = "./media/photo/"+myfilename
                files = {'document': open(filename, 'rb')}
                files2 = {'data': open(filename, 'rb')}
                files3 = {'data': open(filename, 'rb'), 'props': payload_ressource}
                data_ressource = {
                     "title": myfilename
                }
                multiple_files = [
                    ('data', (myfilename, open(filename, 'rb'))),
                    ('props', data_ressource)]
                headers = {'Content-type': 'multipart/form-data'}
                print("Push : "+filename);
                #print os.path.isfile(filename)
                print("----------0-----------")
                #try:
                   #resp = requests.post(url_ressources, files=filename, json=payload_ressource)
                   #resp = requests.post(url_ressources, files=files, json=payload_ressource, headers=headers) 
                   #resp = requests.post(url_ressources, files=files2, headers=headers)
                   #resp = requests.post(url_ressources, files=files2, headers=headers)
                   #resp = requests.post(url_ressources,files = {'data' : (myfilename, open(filename, 'rb'), 'image/jpg')}, data = {'id' : myuid}, headers=headers)
                   #resp = requests.post(url_ressources,files = files2, data= data_ressource, headers=headers)
                   #resp = requests.post(url_ressources,files = multiple_files, headers=headers)
                   #resp = requests.post(url_ressources,files = multiple_files)
                   #resp.text
                   #time.sleep(1)
                   #resp.raise_for_status()
                   #if (resp.status_code == requests.codes.ok):
                   #    resp_dict = resp.json()
                   #    print(resp_dict)
                   #    print(resp_dict['id'])
                   #    UID[myuid]= resp_dict['id']
                #except requests.exceptions.HTTPError as e:
                   #print("Bad HTTP status code:", e)
                   #UID[myuid]=""
                   #print("----------1-----------")
                #except requests.exceptions.RequestException as e:
                   #print("Network error:", e)
                   #UID[myuid]=""
                   #print("----------2-----------")
    
                cmd = "curl -F 'data=@"+filename+"' -F 'props={\"title\":\""+myfilename+"\"}' http://"+ip+":"+port+"/resources?token="+token
                resp = os.popen(cmd).read()
                respj = json.loads(resp)
                #resp_dict = respj.json() 
                print(respj['id'])
                UID[myuid]= respj['id']
    
                print("Link : ",myuid," => ",myentry_uid," // ",UID[myuid]+" => ",UID[myentry_uid])
                time.sleep(1)
    
                cmd = "curl -X PUT http://"+ip+":"+port+"/ressources/"+UID[myuid]+"/notes/"+UID[myentry_uid]+"?token="+token
                resp = os.popen(cmd).read()
                print (resp)
                #url_link = (
                #   "http://"+ip+":"+port+"/ressources/"+UID[myuid]+"/notes/"+UID[myentry_uid]+"?"
                #   "token="+token
                #   )
                #try:
                #  resp = requests.post(url_link)
                #   #time.sleep(1)
                #   resp.raise_for_status()
                #   resp_dict = resp.json()
                #   print(resp_dict)
                #   print(resp_dict['id'])
                #   UID[myuid]= resp_dict['id']
                #except requests.exceptions.HTTPError as e:
                #   print("Bad HTTP status code:", e)
                #except requests.exceptions.RequestException as e:
                #   print("Network error:", e)
    
             if (name == 'diaro_locations'):
                  Lat[myuid] = mylat
                  Lng[myuid] = mylng
    
             if (name == 'diaro_entries'):
                if not mytext:
                      mytext = ""
                if not myfolder_uid:
                      myfolder_uid = Diaro_UID
                if not mytags:
                      mytags = ""
                if not mylocation_uid:
                      mylocation_uid = ""
                mytext = mytext.replace("'", "")
                mytitle = mytitle.replace("'", "")
                mytext = mytext.strip("\'")
                mytitle = mytitle.strip("\'")
                mytext = mytext.strip('(')
                mytitle = mytitle.strip('(')
                listtags = mytags.split(",")
                new_tagslist = "";
                for uid_tags in listtags:
                     if (len(uid_tags) > 2):
                            if uid_tags in UID:
                                 new_tagslist = new_tagslist + TAGS[uid_tags] + ",";
                print ("TAGS",mytags,"==>",new_tagslist);
                payload_note = {
                    "id": myuid,
                    "latitude": Lat[mylocation_uid],
                    "longitude": Lng[mylocation_uid],
                    "tags": new_tagslist,
                    "parent_id": UID[myfolder_uid],
                    "title": mytitle,
                    #"created_time": mydate_ms,
                    "user_created_time": mydate_ms,
                    "user_updated_time": mydate_ms,
                    "author": "Diaro",
                    "body": mytext 
                }
                try:
                    resp = requests.post(url_notes, json=payload_note)
                    #time.sleep(1)
                    resp.raise_for_status()
                    resp_dict = resp.json()
                    print(resp_dict)
                    print(resp_dict['id'])
                    UID[myuid]= resp_dict['id']
                except requests.exceptions.HTTPError as e:
                    print("Bad HTTP status code:", e)
                except requests.exceptions.RequestException as e:
                    print("Network error:", e)
    
    print("End : Parse Table")
    
    strftime("%Y-%m-%d %H:%M:%S", gmtime())
    done = time.time()
    elapsed = done - start
    print(elapsed)
    
    # END : Ouf ...
    diaro python Created Fri, 08 Feb 2019 00:00:00 +0000