Non-point features: Vertex to point(), Point features: get copied into a temporary FC
Combine all those points
Minimum Bounding Geometry()
Buffer()
All in all, it's pretty fast. It'd be faster if I didn't have some validation going on to prevent errors.
With 86 total features, the entire thing runs about 35-40 seconds.
I'd like it to be faster, but it looks like the bottleneck is during the clean-up process; each interim file takes about 3.5 seconds to delete, and there are 2+X interim files for each run. So combining 3 feature classes means there's about 20 seconds spent just cleaning.
I could definitely stand to figure out how to get the spatial reference to default to the map's spatial reference.
I also had to manually prevent the user from using ObjectID, Shape, etc. as identifying fields. I'd definitely like to figure out how to just get them out of the field list in the first place.
#-------------------------------------------------------------------------------
# Name: SiteBuffers
# Purpose: Create one unified buffer around features from different feature classes sharing the same field value.
# E.g. A buffer around a collection of points, lines, and polygons belonging to "Site 1001".
# This does not work on Shapefiles.
# Author: Alfred Baldenweck, ALKA
# Date: March 9, 2022
#-------------------------------------------------------------------------------
def main():
    """Create one unified buffer per field value around features drawn from
    several feature classes (e.g. one buffer around every point, line, and
    polygon belonging to "Site 1001").

    Pipeline:
      1. Validate inputs; skip shapefiles and hosted feature classes.
      2. Convert each input to points (point FCs are copied; other
         geometries go through Feature Vertices To Points).
      3. Append all points into one scratch feature class.
      4. Minimum Bounding Geometry (convex hull, grouped by the name field).
      5. Buffer the hulls at the requested distance.

    All parameters come from the script-tool dialog via arcpy.GetParameter*.
    Does not work on shapefiles or hosted feature classes; those are
    skipped and reported as warnings.
    """
    import arcpy, os, datetime
    from os.path import basename, join

    '''Set-up'''
    inFCs = arcpy.GetParameter(0)            # input layers / feature classes
    nameField = arcpy.GetParameterAsText(1)  # field grouping features into sites
    buffDist = arcpy.GetParameter(2)         # buffer distance (linear unit)
    outputLocation = arcpy.GetParameterAsText(3)
    outputName = arcpy.GetParameterAsText(4)
    lyrAlias = arcpy.GetParameterAsText(5)   # optional display name for the output layer
    spatRef = arcpy.GetParameter(6)
    addOutput = arcpy.GetParameterAsText(7)  # "true"/"false": add result to the map
    if lyrAlias == "":  # if left blank, just use the output FC name
        lyrAlias = outputName
    arcpy.env.workspace = outputLocation
    # Interim data goes to the scratch workspace to prevent issues when the
    # output location is not a GDB (e.g. exporting next to shapefiles).
    scratchDB = arcpy.env.scratchWorkspace
    start = datetime.datetime.now()  # used for the progress-timing messages

    '''Validation'''
    outputName = arcpy.ValidateTableName(outputName)
    # Reject system-maintained fields as the grouping field. The substring
    # test on "shape" also catches Shape_Length / Shape_Area.
    lowered = nameField.lower()
    if lowered in {"objectid", "globalid", "guid"} or "shape" in lowered:
        arcpy.AddError(f"Field name \"{nameField}\" is Invalid. Please choose a different field")
        return

    fcList = []      # unique usable FCs (dedup: same data added from map AND GDB)
    reportShps = []  # shapefiles skipped (unsupported)
    reportHfcs = []  # hosted feature classes skipped (unsupported)
    for FC in inFCs:  # resolve each input to its data source
        arcpy.AddMessage(f"{FC}")
        dataType = arcpy.Describe(FC).dataType  # describe once per input
        if dataType == "FeatureClass":
            if "https://" in f"{FC}":  # hosted check 1: raw service URL
                reportHfcs.append(f"{FC}")
            else:
                FC = str(FC).replace("'", "")  # fixes issues with spaces in GDB names
                fcList.append(FC)
        elif dataType == "ShapeFile":  # shapefile check part 1
            reportShps.append(f"{FC}")
        elif dataType == "FeatureLayer":
            if "https://" in FC.dataSource:  # hosted check 2: layer over a service
                reportHfcs.append(f"{FC}")
            elif ".gdb\\" not in FC.dataSource:  # shapefile check part 2
                reportShps.append(f"{FC}")
            else:  # anything left should be a layer over a local GDB feature class
                FC = FC.dataSource
                fcList.append(FC)
    arcpy.AddMessage(f" Just finished grabbing unique features: {datetime.datetime.now()- start}")
    fcList = list(set(fcList))          # UNIQUE() on the FC list
    reportShps = list(set(reportShps))
    reportHfcs = list(set(reportHfcs))
    reportSkips = []   # inputs missing the grouping field
    cleanPoints = []   # interim point FCs, merged then deleted

    '''Begin work'''
    if not fcList:
        arcpy.AddWarning("No valid feature classes were input.")
        return
    for FC in fcList:
        if nameField in [field.name for field in arcpy.ListFields(FC)]:  # skip inputs lacking the field
            desc = arcpy.Describe(FC)
            # BUGFIX: use only the feature class's base name for the temp
            # output. Joining the full source path onto outputLocation made
            # os.path.join discard outputLocation on absolute paths, so the
            # interim data was written beside the SOURCE data instead.
            tempOut = join(outputLocation, f"{basename(str(FC))}_temp")
            if desc.shapeType == 'Point':
                FC = arcpy.management.Copy(FC, tempOut)
            else:
                FC = arcpy.management.FeatureVerticesToPoints(FC, tempOut, "ALL")
            cleanPoints.append(FC)
        else:
            reportSkips.append(FC)  # report invalid FCs later
    arcpy.AddMessage(f" Just finished prepping points for merging: {datetime.datetime.now()- start}")

    if len(cleanPoints) > 0:
        # Blank point FC that every interim point set gets appended into.
        pointsMerge = arcpy.management.CreateFeatureclass(scratchDB, "pointsMerge_TOOLDerive", "POINT", spatial_reference=spatRef)
        # File path for the convex hull used as the minimum site boundary.
        minSiteBound = join(outputLocation, "minSiteBound_TOOLDerive")
        # Carry the grouping field through the merge so its values survive.
        arcpy.management.AddField(pointsMerge, nameField, "TEXT")
        arcpy.AddMessage(f" Just finished adding the name field and creating blank files: {datetime.datetime.now()- start}")
        # NO_TEST: the interim FCs have differing schemas; only the added
        # name field needs to line up.
        arcpy.management.Append(cleanPoints, pointsMerge, 'NO_TEST')
        arcpy.AddMessage(f" Just finished merging all the points into one file: {datetime.datetime.now()- start}")
        # One convex hull per unique value of the grouping field.
        minSiteBound = arcpy.management.MinimumBoundingGeometry(pointsMerge, minSiteBound, 'CONVEX_HULL', 'LIST', nameField)
        arcpy.AddMessage(f" Just finished making minimum boundary: {datetime.datetime.now()- start}")
        # Buffer the hulls at the specified distance to get the final output.
        siteBound = arcpy.Buffer_analysis(minSiteBound, outputName, buffDist)
        arcpy.AddMessage(f" Just finished the buffer: {datetime.datetime.now()- start}")
        arcpy.management.DeleteField(siteBound, "ORIG_FID")  # tool artifact, not needed
        arcpy.AddMessage(f" Just deleted an unnecessary field: {datetime.datetime.now()- start}")

        '''Clean up the temp files'''
        # NOTE: per-dataset Delete is the slowest part of this tool; routing
        # interim data to the "memory" workspace would avoid it entirely.
        for clean in cleanPoints:
            arcpy.management.Delete(clean)
        arcpy.management.Delete(pointsMerge)
        arcpy.management.Delete(minSiteBound)
        arcpy.AddMessage(f" Just finished cleaning up interim files: {datetime.datetime.now()- start}")

        '''Add layer to map'''
        if addOutput == "true":
            # Only open the project when the map is actually needed; this
            # also keeps the tool usable outside an active Pro session when
            # the user did not ask for the layer to be added.
            p = arcpy.mp.ArcGISProject("CURRENT")
            mapA = p.activeMap
            if mapA:
                lyr = arcpy.management.MakeFeatureLayer(siteBound, lyrAlias).getOutput(0)
                mapA.addLayer(lyr)
                arcpy.AddMessage(f" Just added the product to the map: {datetime.datetime.now()- start}")

        '''Reporting'''
        def _warn_skipped(items, singular_tail, plural_tail):
            # One warning per category: "x is a ..." for a single item,
            # "x, y are ..." for several. Silent when the list is empty.
            if len(items) == 1:
                arcpy.AddWarning(f"Skipped: {items[0]} {singular_tail}")
            elif len(items) > 1:
                arcpy.AddWarning(f"Skipped: {', '.join(items)} {plural_tail}")

        _warn_skipped(reportHfcs,
                      "is a hosted feature class and could not be processed. Please export to a local GDB and try again.",
                      "are hosted feature classes and could not be processed. Please export to a local GDB and try again.")
        _warn_skipped(reportShps,
                      "is a shapefile and could not be processed.",
                      "are shapefiles and could not be processed.")
        if len(reportSkips) > 0:
            joined = reportSkips[0] if len(reportSkips) == 1 else ", ".join(reportSkips)
            arcpy.AddWarning(f"Skipped: {joined}. {nameField} not found")
    else:
        arcpy.AddWarning(f"Tool Unsuccessful. Field: {nameField} not found in any of the input layers.")


if __name__ == '__main__':
    main()
That won't happen. Converting to points will only make matters worse, even with a dissolve of the overlapping buffers. Batch buffer to get the buffers for each geometry, then union the feature classes together and dissolve the overlaps from there.
Non-point features: Vertex to point(), Point features: get copied into a temporary FC
Combine all those points
Minimum Bounding Geometry()
Buffer()
All in all, it's pretty fast. It'd be faster if I didn't have some validation going on to prevent errors.
With 86 total features, the entire thing runs about 35-40 seconds.
I'd like it to be faster, but it looks like the bottleneck is during the clean-up process; each interim file takes about 3.5 seconds to delete, and there are 2+X interim files for each run. So combining 3 feature classes means there's about 20 seconds spent just cleaning.
I could definitely stand to figure out how to get the spatial reference to default to the map's spatial reference.
I also had to manually prevent the user from using ObjectID, Shape, etc. as identifying fields. I'd definitely like to figure out how to just get them out of the field list in the first place.
#-------------------------------------------------------------------------------
# Name: SiteBuffers
# Purpose: Create one unified buffer around features from different feature classes sharing the same field value.
# E.g. A buffer around a collection of points, lines, and polygons belonging to "Site 1001".
# This does not work on Shapefiles.
# Author: Alfred Baldenweck, ALKA
# Date: March 9, 2022
#-------------------------------------------------------------------------------
def main():
    """Create one unified buffer around features from different feature
    classes sharing the same field value (e.g. one buffer around the points,
    lines, and polygons belonging to "Site 1001").

    Reads all parameters from the script-tool dialog via arcpy.GetParameter*.
    Does not work on shapefiles or hosted feature classes; those inputs are
    skipped and reported as warnings at the end.
    """
    import arcpy, os, datetime
    from os.path import join
    '''Set-up'''
    inFCs = arcpy.GetParameter(0)            # input layers / feature classes
    nameField = arcpy.GetParameterAsText(1)  # field grouping features into sites
    buffDist = arcpy.GetParameter(2)         # buffer distance
    outputLocation = arcpy.GetParameterAsText(3)
    outputName = arcpy.GetParameterAsText(4)
    lyrAlias = arcpy.GetParameterAsText(5)# Optional: display name for the output layer
    spatRef= arcpy.GetParameter(6)
    addOutput = arcpy.GetParameterAsText(7)  # "true"/"false": add result to the map
    if lyrAlias == "": #if left blank, just call it the FC name.
        lyrAlias= outputName
    arcpy.env.workspace = outputLocation
    scratchDB = arcpy.env.scratchWorkspace #This gets called to prevent issues when exporting to a shapefile.
    start= datetime.datetime.now() #begin time, used for tracking progress throughout.
    '''Validation'''
    outputName= arcpy.ValidateTableName(outputName)
    # Reject system-maintained fields as the grouping field; the substring
    # test on "shape" also catches Shape_Length / Shape_Area.
    if ((nameField.lower() == "OBJECTID".lower()) or ("Shape".lower() in nameField.lower()) or (nameField.lower() == "GlobalID".lower()) or (nameField.lower() == "GUID".lower())):
        arcpy.AddError(f"Field name \"{nameField}\" is Invalid. Please choose a different field")
        return
    fcList = [] # Unique usable FCs. Dedup prevents running twice if the same data was added from both the map and the GDB on accident.
    reportShps= [] # Shapefiles that are skipped.
    reportHfcs = [] # Hosted feature classes that are skipped.
    for FC in inFCs: #Finds the data source for each item in the input
        arcpy.AddMessage(f"{FC}")
        if arcpy.Describe(FC).dataType == "FeatureClass":
            if ("https://" in f"{FC}"): #Hosted check 1: raw service URL
                reportHfcs.append(f"{FC}")
            else:
                FC = str(FC).replace("'","") #Fixes issues with spaces in GDB names
                fcList.append(FC)
        elif arcpy.Describe(FC).dataType == "ShapeFile": #shapefile check part 1
            reportShps.append(f"{FC}")
        elif arcpy.Describe(FC).dataType == "FeatureLayer":
            if ("https://" in FC.dataSource): #Hosted check 2: layer backed by a service
                reportHfcs.append(f"{FC}")
            elif (".gdb\\" not in FC.dataSource): #Shapefile check part 2
                reportShps.append(f"{FC}")
            else: #Anything left at this point should be a local GDB feature class
                FC = FC.dataSource
                fcList.append(FC)
    arcpy.AddMessage(f" Just finished grabbing unique features: {datetime.datetime.now()- start}")
    fcList = list(set(fcList)) # a UNIQUE() function on the list of FCs
    reportShps = list(set(reportShps)) # a UNIQUE() function on the list of shapefiles
    reportHfcs = list(set(reportHfcs)) # a UNIQUE() function on the list of Hosted Features
    reportSkips = [] #Inputs missing the grouping field.
    cleanPoints= [] #Interim point feature classes; merged, then deleted.
    '''Begin work'''
    if len(fcList) == 0:
        arcpy.AddWarning(f"No valid feature classes were input.")
        return
    for FC in fcList:
        if nameField in[field.name for field in arcpy.ListFields(FC)]: #Don't bother if the field doesn't exist.
            desc= arcpy.Describe(FC)
            # NOTE(review): FC is a full path here, so for absolute paths
            # os.path.join discards outputLocation and the temp data lands
            # next to the SOURCE data — confirm whether that is intended.
            tempOut = join(outputLocation,f"{FC}_temp")
            if desc.shapeType == 'Point':
                FC = arcpy.management.Copy(FC, tempOut)
                cleanPoints.append(FC)
            else:
                FC = arcpy.management.FeatureVerticesToPoints(FC, tempOut, "ALL")
                cleanPoints.append(FC)
        else:
            reportSkips.append(FC) #send invalid FCs to the report list.
    arcpy.AddMessage(f" Just finished prepping points for merging: {datetime.datetime.now()- start}")
    if len(cleanPoints) >0:
        #create blank feature class to use to append stuff
        pointsMerge = arcpy.management.CreateFeatureclass(scratchDB, "pointsMerge_TOOLDerive", "POINT", spatial_reference = spatRef )
        #create a file path for the convex hull used as the minimum site boundary
        minSiteBound = join(outputLocation, "minSiteBound_TOOLDerive")
        #add the name field (Site Name) to the temporary class so those values are preserved.
        arcpy.management.AddField(pointsMerge, nameField, "TEXT")
        arcpy.AddMessage(f" Just finished adding the name field and creating blank files: {datetime.datetime.now()- start}")
        #append all the points to the same blank feature (NO_TEST: interim schemas differ)
        arcpy.management.Append(cleanPoints, pointsMerge, 'NO_TEST')
        arcpy.AddMessage(f" Just finished merging all the points into one file: {datetime.datetime.now()- start}")
        #create a minimum site boundary (one convex hull per unique name-field value).
        minSiteBound = arcpy.management.MinimumBoundingGeometry(pointsMerge, minSiteBound, 'CONVEX_HULL', 'LIST', nameField)
        arcpy.AddMessage(f" Just finished making minimum boundary: {datetime.datetime.now()- start}")
        #create a buffer at the specified distance
        siteBound= arcpy.Buffer_analysis(minSiteBound, outputName, buffDist)
        arcpy.AddMessage(f" Just finished the buffer: {datetime.datetime.now()- start}")
        arcpy.management.DeleteField(siteBound, "ORIG_FID") #We don't need to have this field
        arcpy.AddMessage(f" Just deleted an unnecessary field: {datetime.datetime.now()- start}")
        ''' Clean up the temp files'''
        # NOTE(review): per-dataset Delete is the reported bottleneck of
        # this tool (~3.5 s each) — writing interim data to the "memory"
        # workspace would avoid the clean-up entirely; verify.
        for clean in cleanPoints:
            arcpy.management.Delete(clean)
            #arcpy.AddMessage(f" Just deleted {clean}: {datetime.datetime.now()- start}")
        arcpy.management.Delete(pointsMerge)
        arcpy.management.Delete(minSiteBound)
        arcpy.AddMessage(f" Just finished cleaning up interim files: {datetime.datetime.now()- start}")
        '''Add layer to map'''
        # Requires an active ArcGIS Pro session ("CURRENT" project).
        p = arcpy.mp.ArcGISProject("CURRENT")
        mapA = p.activeMap
        if mapA and (addOutput == "true"):
            lyr = arcpy.management.MakeFeatureLayer(siteBound, lyrAlias).getOutput(0)
            mapA.addLayer(lyr)
            arcpy.AddMessage(f" Just added the product to the map: {datetime.datetime.now()- start}")
        '''Reporting'''
        # One warning per skip category; singular/plural phrasing depends on count.
        if len(reportHfcs) >0:
            if len(reportHfcs) == 1:
                reportHfcs = (" ").join(reportHfcs)
                arcpy.AddWarning(f"Skipped: {reportHfcs} is a hosted feature class and could not be processed. Please export to a local GDB and try again.")
            elif len(reportHfcs) >1:
                reportHfcs = (", ").join(reportHfcs)
                arcpy.AddWarning(f"Skipped: {reportHfcs} are hosted feature classes and could not be processed. Please export to a local GDB and try again.")
        if len(reportShps) >0:
            if len(reportShps) == 1:
                reportShps = (" ").join(reportShps)
                arcpy.AddWarning(f"Skipped: {reportShps} is a shapefile and could not be processed.")
            elif len(reportShps) >1:
                reportShps = (", ").join(reportShps)
                arcpy.AddWarning(f"Skipped: {reportShps} are shapefiles and could not be processed.")
        if len(reportSkips) >0:
            if len(reportSkips) == 1:
                reportSkips = (" ").join(reportSkips)
            elif len(reportSkips) >1:
                reportSkips = (", ").join(reportSkips)
            arcpy.AddWarning(f"Skipped: {reportSkips}. {nameField} not found")
    else:
        arcpy.AddWarning(f"Tool Unsuccessful. Field: {nameField} not found in any of the input layers.")
if __name__ == '__main__':
    main()
';
}
}
}
catch(e){
}
}
}
if (newSub.getAttribute("slang").toLowerCase() != code_l.toLowerCase()) {
if (trLabelsHtml != "") {
var labelSname = "";
if(labelEle[i].querySelector("ul li:nth-child(1)").getAttribute("aria-hidden")){
labelSname = labelEle[i].querySelector("ul li:nth-child(1)").outerHTML;
}
labelEle[i].innerHTML = "";
labelEle[i].innerHTML = labelSname + trLabelsHtml;
}
}
}
}
}
catch(e){
}
}
}
/* V 2.0:3 = Store not translated reply id */
if(lingoRSXML.snapshotLength == 0){
if($scope.falseReplyID == "") {
$scope.falseReplyID = value;
}
}
/* Get translated Body of Replies/Comments */
var lingoRBXML = doc.evaluate(lingoRBExp, doc, null, XPathResult.UNORDERED_NODE_SNAPSHOT_TYPE, null);
for(var i=0;i 0) {
var attachDiv = rootElement.querySelector('div.lia-quilt-row-main').querySelector('div.custom-attachments');
if (attachDiv) {
attachDiv = attachDiv.outerHTML;
}
else if(rootElement.querySelector('div.lia-quilt-row-main').querySelectorAll('#attachments').length > 0){
if ("ForumTopicPage" == "BlogArticlePage") {
attachDiv = rootElement.querySelector('div.lia-quilt-row-main .lia-message-body-content').querySelector('#attachments');
if (attachDiv) {
attachDiv = attachDiv.outerHTML;
}
else{
attachDiv = "";
}
}else{
attachDiv = rootElement.querySelector('div.lia-quilt-row-main').querySelector('#attachments').outerHTML;
}
}
else {
attachDiv = "";
}
/* Feedback Div */
var feedbackDiv = "";
var feedbackDivs = rootElement.querySelector('div.lia-quilt-row-main').querySelectorAll('div.lia-panel-feedback-banner-safe');
if (feedbackDivs.length > 0) {
for (var k = 0; k < feedbackDivs.length; k++) {
feedbackDiv = feedbackDiv + feedbackDivs[k].outerHTML;
}
}
}
else {
var attachDiv = rootElement.querySelector('div.lia-message-body-content').querySelector('div.Attachments.preview-attachments');
if (attachDiv) {
attachDiv = attachDiv.outerHTML;
} else {
attachDiv = "";
}
/* Everyone tags links */
if (document.querySelectorAll("div.TagList").length > 0){
var everyoneTagslink = document.querySelector('div.lia-quilt-row-main').querySelector(".MessageTagsTaplet .TagList");
if ((everyoneTagslink != null)||(everyoneTagslink != undefined)){
everyoneTagslink = everyoneTagslink.outerHTML;
}
else{
everyoneTagslink = "";
}
}
/* Feedback Div */
var feedbackDiv = "";
var feedbackDivs = rootElement.querySelector('div.lia-message-body-content').querySelectorAll('div.lia-panel-feedback-banner-safe');
if (feedbackDivs.length > 0) {
for (var m = 0; m < feedbackDivs.length; m++) {
feedbackDiv = feedbackDiv + feedbackDivs[m].outerHTML;
}
}
}
}
} catch (e) {
}
if (body_L == "") {
/* V 2.0:7 Replacing translated video data with source video data */
var newBodyVideoData = newBody.querySelectorAll('div[class*="video-embed"]');
angular.forEach($scope.videoData[value], function (sourceVideoElement, index) {
if (index <= (newBodyVideoData.length - 1)) {
newBodyVideoData[index].outerHTML = sourceVideoElement.outerHTML
}
});
/* V 2.0:7 = Replacing translated image data with source data */
var newBodyImageData = newBody.querySelectorAll('[class*="lia-image"]');
angular.forEach($scope.imageData[value], function (sourceImgElement, index) {
if (index <= (newBodyImageData.length - 1)) {
newBodyImageData[index].outerHTML = sourceImgElement.outerHTML;
}
});
/* V 2.0:7 = Replacing translated pre tag data with source data */
var newBodyPreTagData = newBody.querySelectorAll('pre');
angular.forEach($scope.preTagData[value], function (sourcePreTagElement, index) {
if (index <= (newBodyPreTagData.length - 1)) {
newBodyPreTagData[index].outerHTML = sourcePreTagElement.outerHTML;
}
});
}
var copyBodySubject = false;
if (body_L == "") {
copyBodySubject = true;
body_L = newBody.innerHTML;
}
/* This code is written as part of video fix by iTalent */
/* try{
var iframeHTMLText = body_L;
var searchIframeText = "<IFRAME";
var foundiFrameTag;
if (iframeHTMLText.indexOf(searchIframeText) > -1) {
foundiFrameTag = decodeHTMLEntities(iframeHTMLText);
foundiFrameTag = foundiFrameTag.split('src="')[1];
body_L = foundiFrameTag;
}
}
catch(e){
} */
/* This code is placed to remove the extra meta tag adding in the UI*/
try{
body_L = body_L.replace('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />','');
}
catch(e){
}
/** We should not replace the source content if user profile language and selected target language matches with source language **/
if(showTrContent) {
var compiled = false;
rootElement.querySelectorAll('div.lia-message-body-content')[0].innerHTML = null
if("ForumTopicPage"=="IdeaPage"){
// var customAttachDiv = '';
rootElement.querySelectorAll('div.lia-message-body-content')[0].innerHTML = body_L + feedbackDiv ;
$compile(rootElement.querySelectorAll('div.lia-message-body-content')[0])($scope);
compiled = true;
/* Attach atttach div */
// document.querySelector("div.translation-attachments-"+value).innerHTML = attachDiv;
rootElement.querySelectorAll('div.lia-message-body-content')[0].insertAdjacentHTML('afterend',attachDiv);
if(rootElement.querySelectorAll('div.lia-quilt-idea-message .lia-message-body .lia-attachments-message').length > 1){
rootElement.querySelectorAll('div.lia-quilt-idea-message .lia-message-body .lia-attachments-message')[1].remove();
}
} else {
if("ForumTopicPage"=="TkbArticlePage"){
rootElement.querySelectorAll('div.lia-message-body-content')[0].innerHTML = body_L + feedbackDiv ;
}else{
rootElement.querySelectorAll('div.lia-message-body-content')[0].innerHTML = body_L + feedbackDiv + attachDiv;
compiled = true;
}
}
/* Destroy and recreate OOyala player videos to restore the videos in target languages which is written by iTalent as part of iTrack LILICON-79 */ /* Destroy and recreate OOyala player videos */
try{
// $scope.videoData[value][0].querySelector("div").getAttribute("id");
for(var vidIndex=0; vidIndex<$scope.videoData[value].length; vidIndex++){
if( $scope.videoData[value][vidIndex].querySelector("div") != null){
var containerId = LITHIUM.OOYALA.players[$scope.videoData[value][vidIndex].querySelector("div").getAttribute("id")].containerId;
videoId = LITHIUM.OOYALA.players[$scope.videoData[value][vidIndex].querySelector("div").getAttribute("id")].videoId;
/** Get the Video object */
vid = OO.Player.create(containerId,videoId);
/** Destroy the video **/
vid.destroy();
/** recreate in the same position */
var vid = OO.Player.create(containerId,videoId);
}
}
}
catch(e){
}
try{
for(var vidIndex=0; vidIndex<($scope.videoData[value].length); vidIndex++){
if($scope.videoData[value][vidIndex].querySelector('video-js') != null){
var data_id = $scope.videoData[value][vidIndex].querySelector('video-js').getAttribute('data-video-id');
var data_account = $scope.videoData[value][vidIndex].querySelector('video-js').getAttribute('data-account');
var data_palyer = $scope.videoData[value][vidIndex].querySelector('video-js').getAttribute('data-player');
var div = document.createElement('div');
div.id = "brightcove";
div.class = "brightcove-player";
div.innerHTML =
'(view in my videos)'
var data = div.getElementsByClassName("video-js");
var script = document.createElement('script');
script.src = "https://players.brightcove.net/" + data_account + "/" + data_palyer + "_default/index.min.js";
for(var i=0;i< data.length;i++){
videodata.push(data[i]);
}
}
}
for(var i=0;i< videodata.length;i++){
document.getElementsByClassName('lia-vid-container')[i].innerHTML = videodata[i].outerHTML;
document.body.appendChild(script);
}
}
catch(e){
}
if(!compiled){
/* Re compile html */
$compile(rootElement.querySelectorAll('div.lia-message-body-content')[0])($scope);
}
}
if (code_l.toLowerCase() != newBody.getAttribute("slang").toLowerCase()) {
/* Adding Translation flag */
var tr_obj = $filter('filter')($scope.sourceLangList, function (obj_l) {
return obj_l.code.toLowerCase() === newBody.getAttribute("slang").toLowerCase()
});
if (tr_obj.length > 0) {
tr_text = "Esri may utilize third parties to translate your data and/or imagery to facilitate communication across different languages.".replace(/lilicon-trans-text/g, tr_obj[0].title);
try {
if ($scope.wootMessages[$rootScope.profLang] != undefined) {
tr_text = $scope.wootMessages[$rootScope.profLang].replace(/lilicon-trans-text/g, tr_obj[0].title);
}
} catch (e) {
}
} else {
//tr_text = "This message was translated for your convenience!";
tr_text = "Esri may utilize third parties to translate your data and/or imagery to facilitate communication across different languages.";
}
try {
if (!document.getElementById("tr-msz-" + value)) {
var tr_para = document.createElement("P");
tr_para.setAttribute("id", "tr-msz-" + value);
tr_para.setAttribute("class", "tr-msz");
tr_para.style.textAlign = 'justify';
var tr_fTag = document.createElement("IMG");
tr_fTag.setAttribute("class", "tFlag");
tr_fTag.setAttribute("src", "/html/assets/langTrFlag.PNG");
tr_fTag.style.marginRight = "5px";
tr_fTag.style.height = "14px";
tr_para.appendChild(tr_fTag);
var tr_textNode = document.createTextNode(tr_text);
tr_para.appendChild(tr_textNode);
/* Woot message only for multi source */
if(rootElement.querySelector(".lia-quilt-forum-message")){
rootElement.querySelector(".lia-quilt-forum-message").appendChild(tr_para);
} else if(rootElement.querySelector(".lia-message-view-blog-topic-message")) {
rootElement.querySelector(".lia-message-view-blog-topic-message").appendChild(tr_para);
} else if(rootElement.querySelector(".lia-quilt-blog-reply-message")){
rootElement.querySelector(".lia-quilt-blog-reply-message").appendChild(tr_para);
} else if(rootElement.querySelector(".lia-quilt-tkb-message")){
rootElement.querySelector(".lia-quilt-tkb-message").appendChild(tr_para);
} else if(rootElement.querySelector(".lia-quilt-tkb-reply-message")){
rootElement.querySelector(".lia-quilt-tkb-reply-message").insertBefore(tr_para,rootElement.querySelector(".lia-quilt-row.lia-quilt-row-footer"));
} else if(rootElement.querySelector(".lia-quilt-idea-message")){
rootElement.querySelector(".lia-quilt-idea-message").appendChild(tr_para);
} else if(rootElement.querySelector('.lia-quilt-occasion-message')){
rootElement.querySelector('.lia-quilt-occasion-message').appendChild(tr_para);
}
else {
if (rootElement.querySelectorAll('div.lia-quilt-row-footer').length > 0) {
rootElement.querySelectorAll('div.lia-quilt-row-footer')[0].appendChild(tr_para);
} else {
rootElement.querySelectorAll('div.lia-quilt-column-message-footer')[0].appendChild(tr_para);
}
}
}
} catch (e) {
}
}
} else {
/* Do not display button for same language */
// syncList.remove(value);
var index = $scope.syncList.indexOf(value);
if (index > -1) {
$scope.syncList.splice(index, 1);
}
}
}
}
});
});
/* V 1.1:2 = Reply Sync button for multi source translation */
} catch(e){
console.log(e);
}
};
if((rContent != undefined) && (rContent != "")) {
drawCanvas(decodeURIComponent(rContent));
/** Update variable with selected language code **/
$scope.previousSelCode = code_l;
}
};
/**
* @function manageTranslation
* @description Managess the translation of given language for the thread
* @param {string} langCode - Language Code
* @param {string} tid - Thread ID
*/
$scope.manageTranslation = function (langCode, tid) {
//debugger;
$scope.showTrText = false;
/* V 2.0:5 = actualStatus variable introduced to indicate detailed connector status on UI. This variable holds the actual translation percentage */
$scope.transPercent = "";
$scope.actualStatus = "";
if (tid != "") {
var bulkTranslation = lithiumPlugin.bulkTranslation(langCode, tid);
bulkTranslation.then(function (trContent) {
if(trContent.body != "") {
$scope.showPreview(trContent.body, $scope.mszList, langCode);
if(langCode != "en-US") {
$scope.showTrText = true;
}
}
if((trContent.status != "NA") && trContent.status != null) {
// $scope.transPercent = String(trContent.status);
$scope.actualStatus = String(trContent.status);
} else {
// $rootScope.errorMsg = "Translation is in progress. Please check again a few minutes."
$rootScope.errorMsg = "Translation is in progress. Please retry in a few minutes."
}
$scope.workbench = trContent.wb;
/* V 2.0:4 = Trigger uncalled or delayed callbacks (documnet uploaded/translation completed from lithium).*/
if(trContent.callback == 'true') {
var trCompletCallback = lithiumPlugin.trCompletCallback(langCode, trContent.docID);
trCompletCallback.then(function (callback){
// $rootScope.errorMsg = "Downloading Translated content in " + langCode + " now. Please check again in a few minutes."
$rootScope.errorMsg = "Uploading content to translate. Please check again in a few minutes."
});
} else if (trContent.callback == 'upload') {
var trCompletUpload = lithiumPlugin.trCompletUpload(langCode, trContent.docID);
trCompletUpload.then(function (callback) {
//$rootScope.errorMsg = "Uploading content to translate. Please check again in a few minutes."
$rootScope.errorMsg = "Uploading content to translate. Please check again in a few minutes."
});
} else if ("many" == "one") {
$scope.updateOOS();
} else if("SmartConx" == "SmartConx"){
if ("many" == "many"){
$scope.updateOOS();
}
}else if ((trContent.status != null) && trContent.status.includes("100")) {
/* If everything fine then only check Out of Sync status */
$scope.updateOOS();
} else {
/* If translation perccent is less than 100 then show the percentage on UI */
$scope.transPercent = $scope.actualStatus;
}
});
}
}
/**
* @function selectThisLang
* @description Called on select dropdown.
* @param {string} lang - Language code
*
*/
/* Handler for the language dropdown: records the selection in analytics,
   fetches the stored translation for this thread, and renders it.
   (anonymousFlag is accepted but not used in this body.) */
$scope.selectThisLang = function (lang, anonymousFlag) {
    /* 1.4:3 Update Analytics on language selection */
    try {
        lingoThreadLangSelected(lang, '1148058');
    } catch (e) {
        /* Analytics failures must not block translation display. */
    }
    /** Display Translated content **/
    var getTranslation = lithiumPlugin.getTranslation(lang, "1148058");
    getTranslation.then(function (trContent) {
        if (trContent.body != "") {
            $scope.showPreview(trContent.body, $scope.mszList, lang);
        } else {
            //$rootScope.errorMsg = "Translation is in progress. Please check again in a few minutes."
            $rootScope.errorMsg = "Translation is in progress. Please retry in a few minutes."
        }
    });
};
var decodeEntities = (function() {
// this prevents any overhead from creating the object each time
var element = document.createElement('div');
function decodeHTMLEntities (str) {
if(str && typeof str === 'string') {
// strip script/html tags
str = str.replace(/