Hi.
I've created a Python toolbox that consists of several scripts, each in its own Python file. The toolbox works fine, and now I'm implementing some logging. I've tried using the standard Python logging module, but I'm not getting the results I expect. Sometimes the logging works fine within the Python toolbox, and other times nothing gets written to the log file. If I restart ArcGIS Pro while the logging works, it stops working.
Does anyone have experience with the logging module? What is the best practice for using it with Python toolboxes?
I've also tried using just logging.basicConfig. That works, but if any other Python toolboxes also use basicConfig, everything gets written to the same log file, even when the filename passed to basicConfig differs between the toolboxes.
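As far as I can tell, that collision is expected: basicConfig() configures the one shared root logger, and it is a no-op once the root logger already has handlers, so whichever toolbox runs first in the ArcGIS Pro process wins. A minimal sketch of the effect (paths are just examples):

import logging

# Toolbox A runs first and configures the root logger:
logging.basicConfig(filename='/temp/ToolboxA.log', level=logging.DEBUG)

# Toolbox B runs later in the same process; the root logger already has
# a handler, so this call silently does nothing.
logging.basicConfig(filename='/temp/ToolboxB.log', level=logging.DEBUG)

# Both toolboxes therefore log to ToolboxA.log:
logging.getLogger('ToolboxB').info('ends up in ToolboxA.log')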
This is from my .pyt, where I create a new logger:
import logging

from CreateProjectTool import CreateProject


class Toolbox(object):
    def __init__(self):
        self.label = "Geoveg Toolbox"
        self.alias = ""

        handler = logging.FileHandler(filename='/temp/Geoveg.log')
        formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
        handler.setFormatter(formatter)

        logger = logging.getLogger(__name__)
        logger.setLevel(logging.DEBUG)
        logger.addHandler(handler)

        self.tools = [CreateProject]
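One caveat worth noting: ArcGIS Pro can re-execute the .pyt (and therefore Toolbox.__init__) every time the toolbox is refreshed, so the code above can attach a fresh FileHandler on each refresh and produce duplicate log lines. A minimal sketch of a guard against that (the helper name is hypothetical, the path and format match the snippet above):

import logging


def get_toolbox_logger():
    """Return the toolbox logger, attaching the file handler only once."""
    logger = logging.getLogger(__name__)
    if not logger.handlers:  # avoid stacking handlers on toolbox refresh
        handler = logging.FileHandler(filename='/temp/Geoveg.log')
        handler.setFormatter(
            logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
    return logger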
In my other script (module), I have this code:
import logging
import traceback
import os
import sys
import zipfile
import arcpy
from pathlib import Path

logger = logging.getLogger(__name__)


class CreateProject(object):
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create new project"
        self.description = ""
        self.canRunInBackground = False

    def getParameterInfo(self):
        """Define parameter definitions"""
        workfolder = arcpy.Parameter(
            displayName="Workfolder",
            name="workfolder",
            datatype="DEFolder",
            parameterType="Required",
            direction="Input")
        params = [workfolder]
        return params

    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True

    def updateParameters(self, parameters):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        return

    def updateMessages(self, parameters):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        return

    def execute(self, parameters, messages):
        """The source code of the tool."""
        destinationFolder = "Geoveg"
        folder = parameters[0].valueAsText
        workfolder = os.path.join(folder, destinationFolder)
        geovegTemplate = os.path.join(os.path.dirname(__file__), "Template")
        try:
            if os.path.exists(workfolder):
                arcpy.AddMessage("Workfolder already exists")
                logger.warning("Workfolder already exists.")
                return
            else:
                # extract the contents of the zip file containing the Geoveg template
                with zipfile.ZipFile(file=os.path.join(geovegTemplate, "Vegdata.zip"), mode='r') as archive:
                    archive.extractall(path=workfolder)
                arcpy.AddMessage("Zip-file extracted at {0}".format(workfolder))
                logger.info("Zip-file extracted at {0}".format(workfolder))
        except Exception:
            # Get the traceback object
            tb = sys.exc_info()[2]
            tbinfo = traceback.format_tb(tb)[0]
            # Concatenate information together concerning the error into a message string
            pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
            msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"
            # Return Python error messages for use in script tool or Python window
            arcpy.AddError(pymsg)
            arcpy.AddError(msgs)
            # Log the error messages as well
            logger.error(pymsg)
            logger.error(msgs)
            return

    def postExecute(self, parameters):
        """This method takes place after outputs are processed and
        added to the display."""
        return
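I suspect the intermittent behaviour comes from logger naming: logging.getLogger(__name__) in the .pyt and in the tool module return two different loggers, and the module logger only reaches the FileHandler if its name happens to be a child of the .pyt logger's name. Otherwise its records propagate to the bare root logger, which has no handler. A minimal sketch of the propagation rule, independent of ArcGIS (the logger names here are hypothetical):

import logging

handler = logging.StreamHandler()

# Handler attached to the logger named 'Geoveg':
logging.getLogger('Geoveg').addHandler(handler)
logging.getLogger('Geoveg').setLevel(logging.DEBUG)

# Child logger: 'Geoveg.CreateProjectTool' propagates records up to
# 'Geoveg', so this message reaches the handler.
logging.getLogger('Geoveg.CreateProjectTool').info('is handled')

# Unrelated logger: 'CreateProjectTool' falls back to the root logger,
# so this INFO message is dropped (the root logger defaults to WARNING
# and has no handler here).
logging.getLogger('CreateProjectTool').info('is not handled')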
Over the years, I have created a logger component that I copy into each of my Python projects, and it has worked quite well for me. It is built on top of the standard Python logging support. "info" messages go to both the console and a log file; "debug" messages go only to the log file. It also deletes old log files.
The logger is created with this code in logger.py (you can see how to control which subfolder the logs are stored in, the prefix for the log file name, and the retention time for old log files). I use IPython for debugging, so you can ignore/remove that code if it isn't applicable.
import os
import sys
import arcpy
import time
import glob
import logging
import logging.handlers

LOG_FILE = os.path.join(arcpy.env.scratchFolder, 'logs', 'fs', 'fs_log_%i.txt' % os.getpid())


# Log handler that forwards messages to ArcGIS
class ArcGisHandler(logging.Handler):
    try:
        get_ipython()
        is_run_from_ipython = True
    except NameError:
        is_run_from_ipython = False

    is_run_from_cmd_line = sys.stdin is not None and sys.stdin.isatty()

    def emit(self, record):
        if not self.is_run_from_cmd_line and not self.is_run_from_ipython:
            arcpy.AddMessage(self.format(record))


# Set up logging
def get(log_name):
    # Compute the full path for this process and make sure the directory exists
    if not os.path.exists(os.path.dirname(LOG_FILE)):
        os.makedirs(os.path.dirname(LOG_FILE))

    # Clean up old log files
    if not os.path.exists(LOG_FILE):
        for file in glob.glob(os.path.join(arcpy.env.scratchFolder, 'logs', 'fs', 'fs_log_*.txt')):
            if time.time() - os.path.getmtime(file) > 2 * 60 * 60 * 24:
                try:
                    os.remove(file)
                except:
                    pass

    logger = logging.getLogger(log_name)
    logger.disabled = False
    logger.setLevel(logging.DEBUG)

    if len(logger.handlers) == 0:
        # File handler for detailed tracing
        try:
            formatter1 = logging.Formatter('%(asctime)s - %(levelname)s - %(module)s - %(message)s')
            fh1 = logging.FileHandler(LOG_FILE)
            fh1.setFormatter(formatter1)
            fh1.setLevel(logging.DEBUG)
            logger.addHandler(fh1)
        except:
            pass

        # Standard out handler
        try:
            formatter2 = logging.Formatter('%(levelname)s - %(message)s')
            fh2 = logging.StreamHandler(sys.stdout)
            fh2.setFormatter(formatter2)
            fh2.setLevel(logging.INFO)
            logger.addHandler(fh2)
        except:
            pass

        # Custom handler to send messages to ArcGIS
        try:
            if sys.version_info.major > 2:
                formatter4 = logging.Formatter('%(asctime)s - %(message)s', '%H:%M:%S')
                fh4 = ArcGisHandler()
                fh4.setFormatter(formatter4)
                fh4.setLevel(logging.INFO)
                logger.addHandler(fh4)
        except:
            pass

    return logger
In every module I include these two lines:
import logger
my_logger = logger.get('fs_log')
At my main entry point I spit out where the log file is:
my_logger.info("Logging to %s" % logger.LOG_FILE)
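To make the level split concrete, here is how it plays out in tool code (the message text is just an example): with the handler levels above, debug lines land only in the file, while info lines go to the file, the console, and the geoprocessing messages when run inside ArcGIS.

import logger

my_logger = logger.get('fs_log')

my_logger.debug("detailed trace: log file only")
my_logger.info("progress message: log file + stdout + arcpy.AddMessage")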
Thank you for your quick reply! I'll try your solution 🙂
I've also made some changes in my script that seem to make it work better: define the logger in the .pyt as the root logger (logging.getLogger() with no name) and make the file handler the only one attached to the logger (logger.handlers = [handler]).
import logging

from CreateProjectTool import CreateProject


class Toolbox(object):
    def __init__(self):
        self.label = "Geoveg Toolbox"
        self.alias = ""

        handler = logging.FileHandler(filename='/temp/Geoveg.log')
        formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
        handler.setFormatter(formatter)

        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)
        logger.handlers = [handler]

        self.tools = [CreateProject]
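With the root logger configured this way, every module-level logger created with logging.getLogger(__name__) propagates its records up to the root logger's file handler, so the tool modules need no handler setup of their own. A minimal sketch (the module name is just an example):

import logging

# In any tool module, e.g. CreateProjectTool.py: no handlers needed here,
# records propagate up to the root logger configured in the .pyt.
logger = logging.getLogger(__name__)

logger.info("This ends up in /temp/Geoveg.log via the root handler.")

Replacing logger.handlers wholesale also keeps handlers from stacking up when ArcGIS Pro refreshes the toolbox, although it does mean that any other toolbox that reconfigures the root logger will overwrite this setup.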