Finalement, dans les 200.000 emails, je pense avoir des doublons… je vais donc profiter de l’export vers Elasticsearch/Kibana pour le vérifier. Un email qui a la même taille et le même checksum MD5 qu’un autre sera considéré comme un doublon.
Voici donc la version V3 (sans la suppression de fichier : os.unlink(path) )
#!/usr/bin/env python3
import email
import plistlib
import hashlib
import re
import glob, os
import string
from datetime import datetime
from email.utils import parsedate_to_datetime
from email.header import Header, decode_header, make_header
from elasticsearch import Elasticsearch
class Emlx(object):
    """Parser for Apple Mail ``.emlx`` files.

    An ``.emlx`` file starts with a line containing the byte length of the
    embedded RFC-822 message, followed by the message itself, followed by a
    binary/XML plist holding Apple-specific metadata.
    """

    def __init__(self):
        # Python-3 zero-argument form; equivalent to super(Emlx, self).
        super().__init__()
        self.bytecount = 0    # length in bytes of the embedded message
        self.msg_data = None  # parsed email.message.Message
        self.msg_plist = None # metadata dict decoded from the trailing plist

    def parse(self, filename_path):
        """Parse *filename_path* and return ``(message, plist_metadata)``.

        :param filename_path: path to an ``.emlx`` file
        :returns: tuple of the parsed ``email.message.Message`` and the
                  decoded plist metadata dict
        """
        with open(filename_path, "rb") as f:
            # First line: decimal byte count of the message that follows.
            self.bytecount = int(f.readline().strip())
            self.msg_data = email.message_from_bytes(f.read(self.bytecount))
            # Everything after the message is the Apple metadata plist.
            self.msg_plist = plistlib.loads(f.read())
        return self.msg_data, self.msg_plist
def md5(fname):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    The file is read in 4 KiB chunks so arbitrarily large mailboxes can be
    hashed without loading them fully into memory.
    """
    digest = hashlib.md5()
    with open(fname, "rb") as stream:
        while True:
            block = stream.read(4096)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
if __name__ == '__main__':
    # Local stdlib import: documents are built as dicts and serialized with
    # json.dumps so quotes/backslashes in headers cannot break the JSON
    # (the previous string concatenation produced invalid documents).
    import json as jsonlib

    msg = Emlx()
    nb_parse = 0      # emails processed; also used as the ES document id
    nb_error = 0      # documents Elasticsearch refused
    save_space = 0    # bytes that deleting the duplicates would reclaim
    # checksum -> occurrences already seen. O(1) per lookup, replacing the
    # original O(n^2) list.count() over 200k emails.
    seen = {}
    path_mail = "/Users/MonLogin/Library/Mail/V6/"
    es_keys = "mail"
    es = Elasticsearch([{'host': 'localhost', 'port': 9200}])

    for root, dirs, files in os.walk(path_mail):
        for file in files:
            if not file.endswith(".emlx"):
                continue
            file_full = os.path.join(root, file)
            # Same MD5 checksum => considered a duplicate email.
            my_check = md5(file_full)
            my_count = seen.get(my_check, 0)
            seen[my_check] = my_count + 1
            message, plist = msg.parse(file_full)
            statinfo = os.stat(file_full)
            if my_count > 0:
                save_space += int(statinfo.st_size)
                # os.unlink(file_full)

            my_date = message['Date']
            my_id = message['Message-ID']
            my_server = message['Received']

            my_date_str = ""
            if my_date is not None:
                try:
                    # Normalize the RFC-2822 date to local time, ISO format
                    # (same convention as the earlier versions).
                    my_date_str = datetime.fromtimestamp(
                        parsedate_to_datetime(my_date).timestamp()
                    ).strftime('%Y-%m-%dT%H:%M:%S')
                except Exception:
                    my_date_str = ""

            # Decode MIME-encoded From: headers (e.g. =?utf-8?...?=).
            my_email = str(make_header(decode_header(str(message['From']))))
            my_domain = re.search(r"@[\w.\-_]+", my_email)
            my_name = re.search(r"[\w.\-_]+@", my_email)

            doc = {
                "checksum": my_check,
                "count": str(my_count),
                "size": statinfo.st_size,
            }
            # Require BOTH regexes to match: the original used my_name_str
            # even when only the domain matched, which could raise NameError
            # or silently reuse a stale value from the previous email.
            if my_domain is not None and my_name is not None:
                doc["name"] = my_name.group().lower()
                doc["domain"] = my_domain.group().lower()
            else:
                cleaned = my_email.replace(",", "").replace('"', "")
                cleaned = re.sub(r'[^\x00-\x7f]', '', cleaned).lower()
                doc["name"] = cleaned
                doc["domain"] = "None"
            if my_date_str:
                doc["date"] = my_date_str
            doc["id"] = nb_parse
            if my_server is not None:
                ip = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                               str(my_server))
                if ip is not None:
                    doc["ip"] = ip.group()
            if my_id is not None:
                # Header objects have no strip(); go through str() first.
                doc["Message-ID"] = str(my_id).strip()
            doc["file"] = file

            body = jsonlib.dumps(doc)
            print(body)
            try:
                res = es.index(index=es_keys, doc_type='emlx',
                               id=nb_parse, body=body)
            except Exception:
                nb_error += 1
            nb_parse += 1
    print(nb_parse)
À suivre pour la V4 !
J’ai voulu faire un premier programme afin de découvrir l’API Elasticsearch, comme base d’information j’ai pris mes emails. C’est assez simple, toutes les personnes sous MacOS ont des emails …
Voici donc le petit programme en Python (pour Michel) : il suffit de changer MonUser.
#!/usr/bin/env python3
import email
import plistlib
import re
import glob, os
from datetime import datetime
from email.utils import parsedate_to_datetime
from email.header import Header, decode_header, make_header
from elasticsearch import Elasticsearch
class Emlx(object):
    """Minimal reader for Apple Mail ``.emlx`` files.

    Layout of an ``.emlx`` file: a first line with the message byte count,
    then the RFC-822 message, then an Apple metadata plist.
    """

    def __init__(self):
        super(Emlx, self).__init__()
        # Number of bytes occupied by the message part of the file.
        self.bytecount = 0
        # Parsed message object and trailing metadata, filled by parse().
        self.msg_data = None
        self.msg_plist = None

    def parse(self, filename_path):
        """Read *filename_path*; return ``(Message, metadata_dict)``."""
        with open(filename_path, "rb") as handle:
            first_line = handle.readline()
            self.bytecount = int(first_line.strip())
            raw_message = handle.read(self.bytecount)
            self.msg_data = email.message_from_bytes(raw_message)
            trailing = handle.read()
            self.msg_plist = plistlib.loads(trailing)
        return self.msg_data, self.msg_plist
if __name__ == '__main__':
    # Local stdlib import: serialize documents with json.dumps instead of
    # string concatenation, so special characters cannot break the JSON.
    import json as jsonlib

    msg = Emlx()
    nb_parse = 0   # emails processed; also used as the ES document id
    path_mail = "/Users/MonUser/Library/Mail/V6/"
    es_keys = "mail"
    es = Elasticsearch([{'host': 'localhost', 'port': 9200}])

    for root, dirs, files in os.walk(path_mail):
        for file in files:
            if not file.endswith(".emlx"):
                continue
            file_full = os.path.join(root, file)
            message, plist = msg.parse(file_full)
            statinfo = os.stat(file_full)

            my_date = message['Date']
            my_id = message['Message-ID']
            my_server = message['Received']

            # Fix: always define my_date_str (the original only set it inside
            # the date branch and crashed on missing/unparsable Date headers).
            my_date_str = ""
            if my_date is not None:
                try:
                    my_date_str = datetime.fromtimestamp(
                        parsedate_to_datetime(my_date).timestamp()
                    ).strftime('%Y-%m-%dT%H:%M:%S')
                except Exception:
                    my_date_str = ""

            my_email = str(message['From'])
            my_domain = re.search(r"@[\w.\-_]+", my_email)
            my_name = re.search(r"[\w.\-_]+@", my_email)

            doc = {}
            # Require BOTH matches: the original called my_name.group() even
            # when only the domain matched, raising AttributeError.
            if my_domain is not None and my_name is not None:
                doc["name"] = my_name.group()
                doc["domain"] = my_domain.group()
            else:
                cleaned = my_email.replace(",", "").replace('"', "")
                doc["name"] = cleaned
                doc["domain"] = "None"
            if my_date_str:
                doc["date"] = my_date_str
            doc["size"] = statinfo.st_size
            doc["id"] = nb_parse
            if my_server is not None:
                ip = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                               str(my_server))
                if ip is not None:
                    doc["ip"] = ip.group()
            if my_id is not None:
                # Header objects have no strip(); go through str() first.
                doc["Message-ID"] = str(my_id).strip()
            doc["file"] = file

            body = jsonlib.dumps(doc)
            print(body)
            res = es.index(index=es_keys, doc_type='emlx',
                           id=nb_parse, body=body)
            nb_parse += 1
    print(nb_parse)
Le but de ce programme c’est simplement de mieux comprendre l’API. Pour le lancer j’ai fait :
J’ai finalement fait une version V2 qui corrige quelques problèmes (exceptions sur quelques emails ou quelques noms de domaine). Il faut y mettre son login (MonLogin).
#!/usr/bin/env python3
import email
import plistlib
import re
import glob, os
import string
from datetime import datetime
from email.utils import parsedate_to_datetime
from email.header import Header, decode_header, make_header
from elasticsearch import Elasticsearch
class Emlx(object):
    """Reader for a single Apple Mail ``.emlx`` file.

    The on-disk format is: one line holding the message length in bytes,
    the RFC-822 message itself, and finally an Apple metadata plist.
    """

    def __init__(self):
        super(Emlx, self).__init__()
        # State produced by the last parse() call.
        self.bytecount = 0     # size of the message section, in bytes
        self.msg_data = None   # email.message.Message instance
        self.msg_plist = None  # decoded metadata plist

    def parse(self, filename_path):
        """Parse the file at *filename_path*; return ``(message, plist)``."""
        with open(filename_path, "rb") as emlx_file:
            header = emlx_file.readline().strip()
            self.bytecount = int(header)
            body = emlx_file.read(self.bytecount)
            rest = emlx_file.read()
        self.msg_data = email.message_from_bytes(body)
        self.msg_plist = plistlib.loads(rest)
        return self.msg_data, self.msg_plist
if __name__ == '__main__':
    # Local stdlib import: build documents as dicts and serialize with
    # json.dumps so quotes/backslashes in headers cannot produce broken JSON
    # (the previous string concatenation could).
    import json as jsonlib

    msg = Emlx()
    nb_parse = 0   # emails processed; also used as the ES document id
    nb_error = 0   # documents Elasticsearch refused
    path_mail = "/Users/MonLogin/Library/Mail/V6/"
    es_keys = "mail"
    es = Elasticsearch([{'host': 'localhost', 'port': 9200}])

    for root, dirs, files in os.walk(path_mail):
        for file in files:
            if not file.endswith(".emlx"):
                continue
            file_full = os.path.join(root, file)
            message, plist = msg.parse(file_full)
            statinfo = os.stat(file_full)

            my_date = message['Date']
            my_id = message['Message-ID']
            my_server = message['Received']

            my_date_str = ""
            if my_date is not None:
                try:
                    # Normalize the RFC-2822 date to local time, ISO format.
                    my_date_str = datetime.fromtimestamp(
                        parsedate_to_datetime(my_date).timestamp()
                    ).strftime('%Y-%m-%dT%H:%M:%S')
                except Exception:
                    my_date_str = ""

            my_email = str(message['From'])
            my_domain = re.search(r"@[\w.\-_]+", my_email)
            my_name = re.search(r"[\w.\-_]+@", my_email)

            doc = {}
            # Require BOTH regexes to match: the original used my_name_str
            # even when only the domain matched, which could raise NameError
            # or silently reuse a stale value from the previous email.
            if my_domain is not None and my_name is not None:
                doc["name"] = my_name.group().lower()
                doc["domain"] = my_domain.group().lower()
            else:
                cleaned = my_email.replace(",", "").replace('"', "")
                cleaned = re.sub(r'[^\x00-\x7f]', '', cleaned).lower()
                doc["name"] = cleaned
                doc["domain"] = "None"
            if my_date_str:
                doc["date"] = my_date_str
            doc["size"] = statinfo.st_size
            doc["id"] = nb_parse
            if my_server is not None:
                ip = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                               str(my_server))
                if ip is not None:
                    doc["ip"] = ip.group()
            if my_id is not None:
                # Header objects have no strip(); go through str() first.
                doc["Message-ID"] = str(my_id).strip()
            doc["file"] = file

            body = jsonlib.dumps(doc)
            print(body)
            try:
                res = es.index(index=es_keys, doc_type='emlx',
                               id=nb_parse, body=body)
            except Exception:
                nb_error += 1
            nb_parse += 1
    print(nb_parse)
Ma version de MacOS :
$ uname -a
Darwin MacBook.local 19.0.0 Darwin Kernel Version 19.0.0: Thu Oct 17 16:17:15 PDT 2019; root:xnu-6153.41.3~29/RELEASE_X86_64 x86_64
Mise à jour de brew (en mode verbose afin d’avoir plus d’informations) :
$ brew update -v
$ brew -v
Homebrew 2.1.16
Homebrew/homebrew-core (git revision 00c2c; last commit 2019-11-18)
Homebrew/homebrew-cask (git revision 9e283; last commit 2019-11-18)
$ brew install grafana
==> Downloading https://homebrew.bintray.com/bottles/grafana-6.4.4.catalina.bottle.tar.gz
Aie première erreur :
...
==> Caveats
Bash completion has been installed to:
/usr/local/etc/bash_completion.d
==> Summary
? /usr/local/Cellar/node/13.1.0: 4,591 files, 54.2MB
==> Installing grafana dependency: yarn
xcrun: error: invalid active developer path (/Library/Developer/CommandLineTools), missing xcrun at: /Library/Developer/CommandLineTools/usr/bin/xcrun
Error: An exception occurred within a child process:
CompilerSelectionError: yarn cannot be built with any available compilers.
Install GNU's GCC:
brew install gcc
Pour fixer le problème :
Les commandes :
$ brew tap elastic/tap
Updating Homebrew...
==> Auto-updated Homebrew!
Updated 1 tap (homebrew/core).
==> Updated Formulae
erlang@20 newman open-zwave stellar-core
==> Deleted Formulae
erlang@19 ruby@2.0
==> Tapping elastic/tap
Cloning into '/usr/local/Homebrew/Library/Taps/elastic/homebrew-tap'...
remote: Enumerating objects: 23, done.
remote: Counting objects: 100% (23/23), done.
remote: Compressing objects: 100% (23/23), done.
remote: Total 23 (delta 10), reused 3 (delta 0), pack-reused 0
Unpacking objects: 100% (23/23), done.
Tapped 18 formulae (66 files, 115.6KB).
$ brew install elastic/tap/metricbeat-full
==> Installing metricbeat-full from elastic/tap
==> Downloading https://artifacts.elastic.co/downloads/beats/metricbeat/metricbeat-7.4.2-darwin-x86_64.tar.gz?tap=elastic/homebrew-tap
######################################################################## 100.0%
==> Caveats
To have launchd start elastic/tap/metricbeat-full now and restart at login:
brew services start elastic/tap/metricbeat-full
Or, if you don't want/need a background service you can just run:
metricbeat
==> Summary
? /usr/local/Cellar/metricbeat-full/7.4.2: 62 files, 129.7MB, built in 1 minute 27 seconds
Ma version de MacOS :
$ uname -a
Darwin MacBook.local 19.0.0 Darwin Kernel Version 19.0.0: Thu Oct 17 16:17:15 PDT 2019; root:xnu-6153.41.3~29/RELEASE_X86_64 x86_64
Mise à jour de brew (en mode verbose afin d’avoir plus d’informations) :
$ brew update -v
...
$ brew -v
Homebrew 2.1.16
Homebrew/homebrew-core (git revision 00c2c; last commit 2019-11-18)
Homebrew/homebrew-cask (git revision 9e283; last commit 2019-11-18)
Début de l’installation :
$ brew install prometheus
Updating Homebrew...
==> Auto-updated Homebrew!
Updated 2 taps (homebrew/core and homebrew/cask).
==> New Formulae
mysql-client@5.7
==> Updated Formulae
awsume innotop manticoresearch mysql-client mytop percona-xtrabackup
hydra kitchen-sync mydumper mysql-connector-c++ ntopng sysbench
imagemagick libzdb mysql++ mysql-connector-c++@1.1 percona-toolkit vala
==> Deleted Formulae
mysql-connector-c
==> Downloading https://homebrew.bintray.com/bottles/prometheus-2.13.1.catalina.bottle.tar.gz
==> Downloading from https://akamai.bintray.com/85/....
############################################################# 100.0%
==> Pouring prometheus-2.13.1.catalina.bottle.tar.gz
==> Caveats
When used with `brew services`, prometheus' configuration is stored as command line flags in
/usr/local/etc/prometheus.args
Example configuration:
echo "--config.file ~/.config/prometheus.yml" > /usr/local/etc/prometheus.args
To have launchd start prometheus now and restart at login:
brew services start prometheus
Or, if you don't want/need a background service you can just run:
prometheus
==> Summary
? /usr/local/Cellar/prometheus/2.13.1: 18 files, 119.3MB
Lancement du service :
Quand je fais des statistiques sur les erreurs dans system.log :
$ grep "com.apple.xpc.launchd" /var/log/system.log | sed 's/(/ /g' | sed 's/)/ /g' | sed 's/\[/ /g' | awk '{print $7}' | sort -n | uniq -c
1 com.apple.TMHelperAgent.SetupOffer
1 com.apple.bsd.dirhelper
2 com.apple.eospreflightagent
1 com.apple.preference.notifications.remoteservice
1 com.apple.preferences.softwareupdate.remoteservice
1 com.apple.screensharing
4 com.apple.xpc.launchd.domain.pid.IDECacheDeleteAppExtension.28627
4 com.apple.xpc.launchd.domain.pid.IDECacheDeleteAppExtension.49501
1 com.apple.xpc.launchd.domain.pid.mdmclient.52930
1 com.apple.xpc.launchd.domain.pid.signpost_reporter.27270
1 com.apple.xpc.launchd.domain.user.501
10854 com.arraynetworks.mhttpd
1 com.microsoft.OneDrive.FinderSync.XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
1 com.microsoft.OneDrive.FinderSync.XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
1 homebrew.mxcl.prometheus
Le vainqueur est com.arraynetworks.mhttpd … surtout à cause d’une erreur: “Service exited with abnormal code: 3”
J’ai pu voir que cela ne se lançait pas dans /usr/local/var/log/prometheus.err.log, sans arrêt j’ai l’erreur :
component=activeQueryTracker msg="Failed to create directory for logging active queries"
J’ai donc modifié le fichier /usr/local/Cellar/prometheus/2.13.1/homebrew.mxcl.prometheus.plist et pas le fichier /Users/XXXXXX/Library/LaunchAgents/homebrew.mxcl.prometheus.plist (inutile)
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>homebrew.mxcl.prometheus</string>
<key>ProgramArguments</key>
<array>
<string>sh</string>
<string>-c</string>
<string>/usr/local/opt/prometheus/bin/prometheus $(< /usr/local/etc/prometheus.args)</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<false/>
<key>WorkingDirectory</key>
<string>/usr/local/opt/prometheus</string>
<key>StandardErrorPath</key>
<string>/usr/local/var/log/prometheus.err.log</string>
<key>StandardOutPath</key>
<string>/usr/local/var/log/prometheus.log</string>
</dict>
</plist>
Je relance :
Et donc … j’ai vu toutes les applications 32 bits à supprimer.
Certaines n’existent plus… et d’autres, il faut les mettre à jour.
Misère.