/// <summary>
/// Opens the feature class named <paramref name="Dataset"/> from the file
/// geodatabase at <paramref name="GDBFilename"/>. If the feature class does
/// not exist yet, it is created via <see cref="CreateFeatureClass"/>.
/// </summary>
/// <param name="GDBFilename">Path to the file geodatabase (.gdb).</param>
/// <param name="Dataset">Name of the feature class to open or create.</param>
/// <param name="DatasetSpatialReference">Spatial reference used only when the
/// feature class has to be created; may be null (see CreateFeatureClass).</param>
/// <returns>The opened or newly created feature class.</returns>
private static IFeatureClass GetFeatureClass(string GDBFilename, string Dataset, ISpatialReference DatasetSpatialReference)
{
    IWorkspaceFactory wsf = null;
    IWorkspace datasetWorkspace = null;
    IFeatureClass objectiveFeatureClass = null;
    try
    {
        wsf = new FileGDBWorkspaceFactoryClass();
        datasetWorkspace = wsf.OpenFromFile(GDBFilename, 0);
        if (!((IWorkspace2)datasetWorkspace).get_NameExists(esriDatasetType.esriDTFeatureClass, Dataset))
            objectiveFeatureClass = CreateFeatureClass(Dataset, DatasetSpatialReference, datasetWorkspace);
        else
            objectiveFeatureClass = ((IFeatureWorkspace)datasetWorkspace).OpenFeatureClass(Dataset);
    }
    finally
    {
        // Release COM objects in reverse order of acquisition.
        if (datasetWorkspace != null)
            Marshal.ReleaseComObject(datasetWorkspace);
        if (wsf != null)
            Marshal.ReleaseComObject(wsf);
    }
    return objectiveFeatureClass;
}

/// <summary>
/// Creates a new multipatch feature class named <paramref name="Dataset"/> in
/// <paramref name="datasetWorkspace"/>, with a Z-aware "Shape" geometry field
/// and an "OID" object-id field.
/// </summary>
/// <param name="Dataset">Name of the feature class to create.</param>
/// <param name="DatasetSpatialReference">Spatial reference for the geometry
/// field; when null, an unknown coordinate system is used instead.</param>
/// <param name="datasetWorkspace">Workspace (file GDB) that receives the new
/// feature class. The caller owns and releases this workspace.</param>
/// <returns>The newly created feature class.</returns>
private static IFeatureClass CreateFeatureClass(string Dataset, ISpatialReference DatasetSpatialReference, IWorkspace datasetWorkspace)
{
    IFeatureClass objectiveFeatureClass = null;
    IField shapeField = null;
    IGeometryDef geometryDef = null;
    IField oidField = null;
    IFields fcFields = null;
    IEnumFieldError enumFieldError = null;
    IFields validatedFields = null;
    IFieldChecker fieldChecker = null;
    IObjectClassDescription objectDescription = null;
    try
    {
        // Geometry ("Shape") field: Z-aware multipatch, no M values.
        shapeField = new FieldClass();
        IFieldEdit fieldEdit = (IFieldEdit)shapeField;
        fieldEdit.Name_2 = "Shape";
        fieldEdit.Type_2 = esriFieldType.esriFieldTypeGeometry;
        geometryDef = new GeometryDefClass();
        IGeometryDefEdit geometryDefEdit = (IGeometryDefEdit)geometryDef;
        geometryDefEdit.GeometryType_2 = esriGeometryType.esriGeometryMultiPatch;
        geometryDefEdit.GridCount_2 = 1;
        geometryDefEdit.set_GridSize(0, 0);
        geometryDefEdit.HasZ_2 = true;
        geometryDefEdit.HasM_2 = false;
        // BUG FIX: the original embedded a second assignment inside the ternary
        // ("... : geometryDefEdit.SpatialReference_2 = new UnknownCoordinateSystemClass()"),
        // assigning the property twice. A plain conditional expresses the intent.
        geometryDefEdit.SpatialReference_2 = (DatasetSpatialReference != null)
            ? DatasetSpatialReference
            : (ISpatialReference)new UnknownCoordinateSystemClass();
        fieldEdit.GeometryDef_2 = geometryDef;

        // Object-id ("OID") field.
        oidField = new FieldClass();
        fieldEdit = (IFieldEdit)oidField;
        fieldEdit.Name_2 = "OID";
        fieldEdit.Type_2 = esriFieldType.esriFieldTypeOID;
        fieldEdit.Precision_2 = 0;

        // Assemble and validate the field collection against the workspace.
        fcFields = new FieldsClass();
        IFieldsEdit fcFieldsEdit = (IFieldsEdit)fcFields;
        fcFieldsEdit.AddField(shapeField);
        fcFieldsEdit.AddField(oidField);
        fieldChecker = new FieldCheckerClass();
        fieldChecker.ValidateWorkspace = datasetWorkspace;
        fieldChecker.Validate(fcFields, out enumFieldError, out validatedFields);

        objectDescription = new FeatureClassDescriptionClass();
        objectiveFeatureClass = ((IFeatureWorkspace)datasetWorkspace).CreateFeatureClass(
            Dataset, validatedFields, null, objectDescription.ClassExtensionCLSID,
            esriFeatureType.esriFTSimple, shapeField.Name, "");
    }
    finally
    {
        // Release every COM object created here; the workspace belongs to the caller.
        if (shapeField != null) Marshal.ReleaseComObject(shapeField);
        if (geometryDef != null) Marshal.ReleaseComObject(geometryDef);
        if (oidField != null) Marshal.ReleaseComObject(oidField);
        if (fcFields != null) Marshal.ReleaseComObject(fcFields);
        if (enumFieldError != null) Marshal.ReleaseComObject(enumFieldError);
        if (validatedFields != null) Marshal.ReleaseComObject(validatedFields);
        if (fieldChecker != null) Marshal.ReleaseComObject(fieldChecker);
        if (objectDescription != null) Marshal.ReleaseComObject(objectDescription);
    }
    return objectiveFeatureClass;
}
}
Here is part 2:
/// <summary>
/// Opens the feature class named <paramref name="Dataset"/> from the file
/// geodatabase at <paramref name="GDBFilename"/>. If the feature class does
/// not exist yet, it is created via <see cref="CreateFeatureClass"/>.
/// </summary>
/// <param name="GDBFilename">Path to the file geodatabase (.gdb).</param>
/// <param name="Dataset">Name of the feature class to open or create.</param>
/// <param name="DatasetSpatialReference">Spatial reference used only when the
/// feature class has to be created; may be null (see CreateFeatureClass).</param>
/// <returns>The opened or newly created feature class.</returns>
private static IFeatureClass GetFeatureClass(string GDBFilename, string Dataset, ISpatialReference DatasetSpatialReference)
{
    IWorkspaceFactory wsf = null;
    IWorkspace datasetWorkspace = null;
    IFeatureClass objectiveFeatureClass = null;
    try
    {
        wsf = new FileGDBWorkspaceFactoryClass();
        datasetWorkspace = wsf.OpenFromFile(GDBFilename, 0);
        if (!((IWorkspace2)datasetWorkspace).get_NameExists(esriDatasetType.esriDTFeatureClass, Dataset))
            objectiveFeatureClass = CreateFeatureClass(Dataset, DatasetSpatialReference, datasetWorkspace);
        else
            objectiveFeatureClass = ((IFeatureWorkspace)datasetWorkspace).OpenFeatureClass(Dataset);
    }
    finally
    {
        // Release COM objects in reverse order of acquisition.
        if (datasetWorkspace != null)
            Marshal.ReleaseComObject(datasetWorkspace);
        if (wsf != null)
            Marshal.ReleaseComObject(wsf);
    }
    return objectiveFeatureClass;
}

/// <summary>
/// Creates a new multipatch feature class named <paramref name="Dataset"/> in
/// <paramref name="datasetWorkspace"/>, with a Z-aware "Shape" geometry field
/// and an "OID" object-id field.
/// </summary>
/// <param name="Dataset">Name of the feature class to create.</param>
/// <param name="DatasetSpatialReference">Spatial reference for the geometry
/// field; when null, an unknown coordinate system is used instead.</param>
/// <param name="datasetWorkspace">Workspace (file GDB) that receives the new
/// feature class. The caller owns and releases this workspace.</param>
/// <returns>The newly created feature class.</returns>
private static IFeatureClass CreateFeatureClass(string Dataset, ISpatialReference DatasetSpatialReference, IWorkspace datasetWorkspace)
{
    IFeatureClass objectiveFeatureClass = null;
    IField shapeField = null;
    IGeometryDef geometryDef = null;
    IField oidField = null;
    IFields fcFields = null;
    IEnumFieldError enumFieldError = null;
    IFields validatedFields = null;
    IFieldChecker fieldChecker = null;
    IObjectClassDescription objectDescription = null;
    try
    {
        // Geometry ("Shape") field: Z-aware multipatch, no M values.
        shapeField = new FieldClass();
        IFieldEdit fieldEdit = (IFieldEdit)shapeField;
        fieldEdit.Name_2 = "Shape";
        fieldEdit.Type_2 = esriFieldType.esriFieldTypeGeometry;
        geometryDef = new GeometryDefClass();
        IGeometryDefEdit geometryDefEdit = (IGeometryDefEdit)geometryDef;
        geometryDefEdit.GeometryType_2 = esriGeometryType.esriGeometryMultiPatch;
        geometryDefEdit.GridCount_2 = 1;
        geometryDefEdit.set_GridSize(0, 0);
        geometryDefEdit.HasZ_2 = true;
        geometryDefEdit.HasM_2 = false;
        // BUG FIX: the original embedded a second assignment inside the ternary
        // ("... : geometryDefEdit.SpatialReference_2 = new UnknownCoordinateSystemClass()"),
        // assigning the property twice. A plain conditional expresses the intent.
        geometryDefEdit.SpatialReference_2 = (DatasetSpatialReference != null)
            ? DatasetSpatialReference
            : (ISpatialReference)new UnknownCoordinateSystemClass();
        fieldEdit.GeometryDef_2 = geometryDef;

        // Object-id ("OID") field.
        oidField = new FieldClass();
        fieldEdit = (IFieldEdit)oidField;
        fieldEdit.Name_2 = "OID";
        fieldEdit.Type_2 = esriFieldType.esriFieldTypeOID;
        fieldEdit.Precision_2 = 0;

        // Assemble and validate the field collection against the workspace.
        fcFields = new FieldsClass();
        IFieldsEdit fcFieldsEdit = (IFieldsEdit)fcFields;
        fcFieldsEdit.AddField(shapeField);
        fcFieldsEdit.AddField(oidField);
        fieldChecker = new FieldCheckerClass();
        fieldChecker.ValidateWorkspace = datasetWorkspace;
        fieldChecker.Validate(fcFields, out enumFieldError, out validatedFields);

        objectDescription = new FeatureClassDescriptionClass();
        objectiveFeatureClass = ((IFeatureWorkspace)datasetWorkspace).CreateFeatureClass(
            Dataset, validatedFields, null, objectDescription.ClassExtensionCLSID,
            esriFeatureType.esriFTSimple, shapeField.Name, "");
    }
    finally
    {
        // Release every COM object created here; the workspace belongs to the caller.
        if (shapeField != null) Marshal.ReleaseComObject(shapeField);
        if (geometryDef != null) Marshal.ReleaseComObject(geometryDef);
        if (oidField != null) Marshal.ReleaseComObject(oidField);
        if (fcFields != null) Marshal.ReleaseComObject(fcFields);
        if (enumFieldError != null) Marshal.ReleaseComObject(enumFieldError);
        if (validatedFields != null) Marshal.ReleaseComObject(validatedFields);
        if (fieldChecker != null) Marshal.ReleaseComObject(fieldChecker);
        if (objectDescription != null) Marshal.ReleaseComObject(objectDescription);
    }
    return objectiveFeatureClass;
}
}
// Release each row as soon as it is processed, and guarantee the last row and
// the cursor are released even if "some logic" throws.
try
{
    row = cursor.NextRow();
    while (row != null)
    {
        //some logic
        Marshal.ReleaseComObject(row); // BUG FIX: semicolon was missing
        row = null; // so the finally block does not release this row a second time
        row = cursor.NextRow();
    }
}
finally
{
    // BUG FIX: original read "if(row =! null)", i.e. "row = !null", which does
    // not compile; the intended test is "row != null".
    if (row != null)
        Marshal.ReleaseComObject(row);
    Marshal.ReleaseComObject(cursor);
}
I have to ask because I'm very curious about this, when you are releasing COM objects you always do something like this:try { row = cursor.NextRow(); while(row != null) { //some logic Marshal.ReleaseComObject(row) row = null; row = cursor.NextRow(); } } finally { if(row =! null) Marshal.ReleaseComObject(row); Marshal.ReleaseComObject(cursor); }
Now let's analyze this: in the loop you are releasing the row object every time and assigning its value to null - because, as you said/wrote somewhere, you want to make sure the row object will be destroyed/released. But what about the finally section: if the row object is not null you are releasing it with the ReleaseComObject method...but the ReleaseComObject method only decreases the reference count by 1, so there is a slight possibility that a row object won't be released completely, right? Should we use the FinalReleaseComObject method then? I know that your code examples for releasing COM objects are helpful, but I'm just willing to learn as much as I can, since COM objects generate so many problems for .net developers in esri products.
Regards,
MDruzgala
try
{
    // increments the RCW's ref count for the current instance of row by 1
    row = cursor.NextRow();
    while (row != null)
    {
        // some code

        // decrement the RCW's ref count by 1 (it was only incremented
        // by 1 as seen above)
        Marshal.ReleaseComObject(row); // BUG FIX: semicolon was missing

        // Set row to null in case the following statements throw. If an
        // exception is thrown before row is assigned a new instance, we enter
        // the finally block on our way out of the method; if row still pointed
        // to the current (already released) row there, its reference count
        // would be decremented a second time, down to -1. Nulling it prevents
        // that double release.
        row = null;

        // fetch the next row; this increments its RCW's ref count by 1
        row = cursor.NextRow();
    }
}
finally
{
    // In non-exceptional cases row is null here and no release is needed.
    // If an exception occurred and row is not null, it was not released
    // (we know this because we always null it right after releasing it).
    if (row != null)
        Marshal.ReleaseComObject(row);

    // NOTE(review): cursor's acquisition is not shown here — if its RCW ref
    // count was never incremented in this scope (e.g. it was passed in as a
    // managed parameter), it should not be decremented here either.
    Marshal.ReleaseComObject(cursor);
}
Hi, Jason.
I was away on business and couldn't proceed with testing. Now I am able to, and I would like to go on with it.
So, I use the included file geodatabase in my test app. I use it in the actual command too. As is easy to see i delete it each execution time and create the new one. In the command i invoke the Import3DFiles method of the test app.
I tried your suggestions to release GDB and IFieldChecker. Also i found out that i open it in the Main procedure. It was commented. All of that reduced the memory increase registered in the umdh log for a half. But it is still almost 250MB (instead of ~500MB).
Also i tried to make the second capture after all releases that decreased the amount of memory with the result "Total decrease ==253812936 requested + 25696 overhead = 253838632". The last portion of the log contains the above-mentioned memory decrease about 250MB. But, it seems to me such a method is not correct as the registered memory decrease produced by the releasing the featurebuffer itself (exactly determined).
Also, I executed your code and it finished with no exceptions. BUT!!! You omitted the catch block. In the end, the main result of the execution, and the confirmation that the import completed successfully, is the GDB feature class with the imported model inside. But neither my resulting GDB nor yours contains it. If I put the catch block back, the exception is registered.
Please check the GDB feature class for the imported model. Its table should contain a single row; moreover, you should be able to zoom to it and visualize it in the ArcGlobe app — otherwise the code execution did not succeed.
I attached the umdh logs made with different captures (after all releases, in the catch block - after the InsertFeature exception, after releasing the FeatureBuffer)
Welcome back, Alexey.
I looked at your UMDH logs and I really wasn't able to get anymore information than I already have.
I'm not sure what our configuration differences are, but my code is definitely succeeding. I've attached screenshots and all of the code (without the model.) Perhaps you can try my project and see if you have better luck with it?
Let me know if the screenshots look as you would expect them to (in the case of success.) Also, let me know if the project I sent you does any better.
Thanks!
Hi, Jason!
So what about my achievements in the test.
As expected, I ran your test app and it did not succeed.
I finally downloaded and installed ArcGIS 10.1 with the SDK. Both apps (mine and yours) succeeded with it. The model was imported into the GDB feature class. But it is displayed without textures in ArcScene and ArcGlobe.
The model imported using the Add-in command in ArcGIS 10 has textures.
It is not a critical issue (i mean the lost textures) since the model has been imported. It more important for me that the import fails with the ArcGIS 10 since I'm constrained to use the 10th version by the customer.
Maybe you have some thoughts else about the problem. Also i think it's a bug that is necessary to report.
Worst of all, I have encountered this kind of problem fairly often up to now, with models of different sizes.
I think you have found the answer to your own problem. Unfortunately, it looks like they found the problem you're seeing in 10 SP5 and fixed it in 10.1. I guess they could have fixed it without knowing the problem ever existed. I'm still new to ArcGIS, so I don't know how willing ESRI is to fix bugs in previous service packs, especially when they are fixed in later versions.
If this as we both seem to suspect (ArcGIS 10 SP5 has a serious bug; most likely a memory leak), then I don't think there is much left that we can do. To me, it seems your options are to either convince your customer to move to 10.1 (harder than it sounds, I'm sure) or convince ESRI to patch an old version.
Have you compared memory usage between my implementation and yours on 10.1? What are the differences? Probably don't need to use UMDH, just task manager.