2015-04-23 89 views
0

我的腳本允許我將照片導入到特定的文件夾並通過JSON文件導入更新我的數據庫MongoDB。Python日誌旋轉和壓縮

我使用日誌旋轉和壓縮設置了一個日誌系統。

我有不同的問題:

  • 有些動作不loggued文件,但在控制檯中。 (subprocess.call
  • 記錄器創建了我三個文件而不是一個文件

enter image description here

python腳本:

def moveFTPFiles(serverName,userName,passWord,remotePath,localPath,deleteRemoteFiles=False,onlyDiff=False): 
    """Connect to an FTP server and bring down files to a local directory""" 
    import os 
    import sys 
    import glob 
    from sets import Set 
    import ftplib 

    logger.info(' Deleting Files ') 

    os.chdir(localDirectoryPath) 
    files=glob.glob('*.*') 
    for filename in files: 
     os.unlink(filename) 

    logger.info(' Retreiving Files ') 

    try: 
     ftp = ftplib.FTP(serverName) 
     ftp.login(userName,passWord) 
     ftp.cwd(remotePath) 
     logger.info(' Connecting ') 
     if onlyDiff: 
      lFileSet = Set(os.listdir(localPath)) 
      rFileSet = Set(ftp.nlst()) 
      transferList = list(rFileSet - lFileSet) 
      logger.info(' Missing ' + str(len(transferList))) 
     else: 
      transferList = ftp.nlst() 
     delMsg = "" 
     filesMoved = 0 
     for fl in transferList: 
      # create a full local filepath 
      localFile = localPath + fl 
      # print "Create a full local filepath: " + localFile 
      grabFile = True 
      if grabFile:     
       #open a the local file 
       fileObj = open(localFile, 'wb') 
       # Download the file a chunk at a time using RETR 
       ftp.retrbinary('RETR ' + fl, fileObj.write) 
       # Close the file 
       # print "Close the file " 
       fileObj.close() 
       filesMoved += 1 
       #print "Uploaded: " + str(filesMoved) 
       #sys.stdout.write(str(filesMoved)+' ') 
       #sys.stdout.flush() 

      # Delete the remote file if requested 
      if deleteRemoteFiles: 
       ftp.delete(fl) 
       delMsg = " and Deleted"   

     logger.info('Files Moved' + delMsg + ': ' + str(filesMoved) + ' on ' + timeStamp()) 
    except ftplib.all_errors as e: 
     logger.error('We have a problem on moveFTPFiles' + '%s' % e) 
    ftp.close() # Close FTP connection 
    ftp = None 

def timeStamp(): 
    """returns a formatted current time/date""" 
    import time 
    return str(time.strftime("%a %d %b %Y %I:%M:%S %p")) 

def importData(serverName,userName,passWord,directory,filematch,source,destination): 
    import socket 
    import ftplib 
    import os 
    import subprocess 
    import json 

    try: 
     ftp = ftplib.FTP(serverName) 
     ftp.login(userName,passWord) 
     ftp.cwd(directory) 
     logger.info(' Connecting ') 
     # Loop through matching files and download each one individually 
     for filename in ftp.nlst(filematch): 
      fhandle = open(filename, 'wb') 
      logger.info(' Getting ' + filename) 
      ftp.retrbinary('RETR ' + filename, fhandle.write) 
      fhandle.close() 
     #convert xml to json 
     logger.info(' Convert ' + filename + ' to .json ') 
     subprocess.call('xml2json -t xml2json -o stockvo.json stockvo.xml --strip_text', shell=True) 
     #remove xml file 
     logger.info(' Delete ' + filename) 
     os.unlink(source+'stockvo.xml') 
     #modify json file 
     logger.info(' Modify .json file') 
     data = json.loads(open("stockvo.json").read()) 
     with open("stockvo.json", "w") as outfile: 
       json.dump(data["Stock"]["Vehicule"], outfile) 

     #import json file to MongoDB 
     logger.info(' Import json file to MongoDB') 
     subprocess.call('mongoimport --db AutoPrivilege -c cars stockvo.json --jsonArray --upsert --drop',shell=True) 

     #remove old json file 
     logger.info('Delete old .json file') 
     ## if file exists, delete it ## 
     myfile=destination+"stockvo.json" 
     if os.path.isfile(myfile): 
      os.remove(myfile) 
     #os.unlink(destination+'stockvo.json') 

     #move json file 
     logger.info('Move .json') 
     os.system('mv %s %s' % (source+'stockvo.json', destination+'stockvo.json')) 

    except ftplib.all_errors as e: 
     logger.error('We have a problem on importData' + '%s' % e) 
    ftp.close() # Close FTP connection 
    ftp = None 

import time 
import re 
import os 
import stat 
import logging 
import logging.handlers as handlers 

class SizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler): 
    """ 
    Handler for logging to a set of files, which switches from one file 
    to the next when the current file reaches a certain size, or at certain 
    timed intervals 
    """ 
    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, 
       delay=0, when='h', interval=1, utc=False): 
     # If rotation/rollover is wanted, it doesn't make sense to use another 
     # mode. If for example 'w' were specified, then if there were multiple 
     # runs of the calling application, the logs from previous runs would be 
     # lost if the 'w' is respected, because the log file would be truncated 
     # on each run. 
     if maxBytes > 0: 
      mode = 'a' 
     handlers.TimedRotatingFileHandler.__init__(
      self, filename, when, interval, backupCount, encoding, delay, utc) 
     self.maxBytes = maxBytes 

    def shouldRollover(self, record): 
     """ 
     Determine if rollover should occur. 

     Basically, see if the supplied record would cause the file to exceed 
     the size limit we have. 
     """ 
     if self.stream is None:     # delay was set... 
      self.stream = self._open() 
     if self.maxBytes > 0:     # are we rolling over? 
      msg = "%s\n" % self.format(record) 
      self.stream.seek(0, 2) #due to non-posix-compliant Windows feature 
      if self.stream.tell() + len(msg) >= self.maxBytes: 
       return 1 
     t = int(time.time()) 
     if t >= self.rolloverAt: 
      return 1 
     return 0 

if __name__ == '__main__':  

    #log to a file 
    log_filename='/opt/log/importData.log' 
    logger=logging.getLogger('importData') 
    logger.setLevel(logging.DEBUG) 
    handler=SizedTimedRotatingFileHandler(
     log_filename, maxBytes=100, backupCount=5, 
     when='s',interval=10, 
     # encoding='bz2', # uncomment for bz2 compression 
    ) 
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') 
    handler.setFormatter(formatter) 
    logger.addHandler(handler) 

    #--- constant connection values 
    ftpServerName = "xxxxx.xxxxxx" 
    ftpU = "xxxxxxxx" 
    ftpP = "xxxxxx" 
    remoteDirectoryPath = "/xxxxxx/xxxxxx/xxxxxx/xxxxxxx/" 
    localDirectoryPath = "/xxxxx/xxxxxxxxx/xxxxxxxx/xxxxxxx/" 

    directory = '/xxxxxxx/' 
    filematch = '*.xml' 
    source='/xxxxxx/xxxxxxxx/' 
    destination='/xxxxxxxx/xxxxxx/' 

    deleteAfterCopy = False  #set to true if you want to clean out the remote directory 
    onlyNewFiles = True   #set to true to grab & overwrite all files locally 
    importData(ftpServerName,ftpU,ftpP,directory,filematch,source,destination) 
    moveFTPFiles(ftpServerName,ftpU,ftpP,remoteDirectoryPath,localDirectoryPath,deleteAfterCopy,onlyNewFiles) 

importData.log:

2015年4月23日11:33:57408個INFO文件感動:1145於星期四2015年4月23日上午11點33分57秒

importData.log.2015-04-23_11- 33-40:

2015年4月23日11:33:40896個INFO刪除文件2015年4月23日11:33:40956個 INFO Retreiving文件

importData.log2015-04-23_11-33-41:

2015年4月23日11:33:41386 INFO連接2015年4月23日11:33:41825 INFO 缺少1145

任何人都可以在python中提出一種方法來解決我的問題。

回答

1
SizedTimedRotatingFileHandler(log_filename, maxBytes=100, ... 

確切地說它配置的是 - 最多100個字節將被記錄到一個文件中。將最大大小增加到幾百(百?)兆字節,您將不經常旋轉。

關於只對文件進行部分日誌記錄,只能爲'importData'模塊定義處理程序。其他模塊將寫入默認處理程序(可能是控制檯)。

關於subprocess.call()本身,它實際上並沒有記錄任何東西。除非使用參數stdout捕獲輸出,否則輸出將打印到正常的stdout。您需要設置一個管道,然後將其讀入記錄器。

+0

ok thx,我如何定義所有模塊的處理程序? – Mercer

+0

在文檔中稱爲根處理程序。空字符串會給你:'logging.getLogger('')。addHandler(...)' – viraptor