diff --git a/GenotypeAssays/resources/views/sbtReview.html b/GenotypeAssays/resources/views/sbtReview.html index 695e7fd1d..5c0b6363e 100644 --- a/GenotypeAssays/resources/views/sbtReview.html +++ b/GenotypeAssays/resources/views/sbtReview.html @@ -159,11 +159,9 @@ //sort: '-percent', containerPath: Laboratory.Utils.getQueryContainerPath(), removeableFilters: [ - LABKEY.Filter.create('percent_from_locus', 0.25, LABKEY.Filter.Types.GTE) + LABKEY.Filter.create('percent_from_locus', 0.25, LABKEY.Filter.Types.GTE), + LABKEY.Filter.create('analysis_id', analysisId, LABKEY.Filter.Types.EQUALS) ], - parameters: { - AnalysisId: analysisId - }, scope: this, success: this.onDataRegionLoad }) @@ -180,12 +178,10 @@ containerPath: Laboratory.Utils.getQueryContainerPath(), removeableFilters: [ LABKEY.Filter.create('percent_from_locus', 0.25, LABKEY.Filter.Types.GTE), - LABKEY.Filter.create('total_reads', 5, LABKEY.Filter.Types.GT), - LABKEY.Filter.create('totalLineages', 1, LABKEY.Filter.Types.EQUAL) + LABKEY.Filter.create('total_reads_from_locus', 5, LABKEY.Filter.Types.GT), + LABKEY.Filter.create('totalLineages', 1, LABKEY.Filter.Types.EQUAL), + LABKEY.Filter.create('analysis_id', analysisId, LABKEY.Filter.Types.EQUAL) ], - parameters: { - AnalysisId: analysisId - }, scope: this, success: this.onDataRegionLoad }) @@ -195,7 +191,7 @@ title: 'Haplotype Matches', items: [{ xtype: 'genotypeassays-haplotypepanel', - analysisId: [analysisId] + analysisIds: [analysisId] }] },{ xtype: 'ldk-querypanel', diff --git a/GenotypeAssays/resources/web/genotypeassays/buttons.js b/GenotypeAssays/resources/web/genotypeassays/buttons.js index f2c022429..6ba13c61d 100644 --- a/GenotypeAssays/resources/web/genotypeassays/buttons.js +++ b/GenotypeAssays/resources/web/genotypeassays/buttons.js @@ -28,7 +28,7 @@ GenotypeAssays.buttons = new function(){ var newForm = Ext4.DomHelper.append(document.getElementsByTagName('body')[0], '
' + - '' + + '' + '
'); newForm.submit(); } diff --git a/GenotypeAssays/resources/web/genotypeassays/panel/HaplotypePanel.js b/GenotypeAssays/resources/web/genotypeassays/panel/HaplotypePanel.js index aacc05bdc..fb690dc31 100644 --- a/GenotypeAssays/resources/web/genotypeassays/panel/HaplotypePanel.js +++ b/GenotypeAssays/resources/web/genotypeassays/panel/HaplotypePanel.js @@ -1,7 +1,7 @@ Ext4.define('GenotypeAssays.panel.HaplotypePanel', { extend: 'Ext.panel.Panel', alias: 'widget.genotypeassays-haplotypepanel', - analysisId: null, + analysisIds: null, showCheckBoxes: false, initComponent: function(){ @@ -461,13 +461,11 @@ Ext4.define('GenotypeAssays.panel.HaplotypePanel', { schemaName: 'sequenceanalysis', queryName: 'alignment_summary_by_lineage', columns: 'analysis_id,analysis_id/readset,analysis_id/readset/subjectId,lineages,loci,total,total_reads,percent,total_reads_from_locus,percent_from_locus', - parameters: { - AnalysisId: this.analysisId - }, apiVersion: 13.2, scope: this, filterArray: [ - LABKEY.Filter.create('percent_from_locus', minPct || 0, LABKEY.Filter.Types.GTE) + LABKEY.Filter.create('percent_from_locus', minPct || 0, LABKEY.Filter.Types.GTE), + LABKEY.Filter.create('analysis_id', this.analysisIds, LABKEY.Filter.Types.IN) ], failure: LDK.Utils.getErrorCallback(), success: function(results){ @@ -501,13 +499,13 @@ Ext4.define('GenotypeAssays.panel.HaplotypePanel', { schemaName: 'sequenceanalysis', queryName: 'alignment_summary_grouped', columns: 'analysis_id,lineages,loci,alleles,total_reads,percent,total_reads_from_locus,percent_from_locus', + filterArray: [ + LABKEY.Filter.create('analysis_id', this.analysisIds, LABKEY.Filter.Types.IN) + ], // This is designed to remove the view-level sorts: sort: 'analysis_id', apiVersion: 13.2, scope: this, - parameters: { - AnalysisId: this.analysisId - }, failure: LDK.Utils.getErrorCallback(), success: function(results){ this.lineageToAlleleMap = {}; diff --git 
a/GenotypeAssays/resources/web/genotypeassays/window/PublishResultsWindow.js b/GenotypeAssays/resources/web/genotypeassays/window/PublishResultsWindow.js index d2969f4b3..7be553457 100644 --- a/GenotypeAssays/resources/web/genotypeassays/window/PublishResultsWindow.js +++ b/GenotypeAssays/resources/web/genotypeassays/window/PublishResultsWindow.js @@ -11,17 +11,8 @@ Ext4.define('GenotypeAssays.window.PublishResultsWindow', { return; } - const analysisId = dr.getParameters()?.AnalysisId; - if (!analysisId) { - Ext4.Msg.alert('Error', 'Error: unable to find analysisId. This should not occur.'); - LDK.Assert.assertNotEmpty('Unable to find AnalysisId parameter from the DataRegion in PublishResultsWindow'); - - return; - } - Ext4.create('GenotypeAssays.window.PublishResultsWindow', { dataRegionName: dataRegionName, - analysisId: analysisId, actionName: 'cacheAnalyses' }).show(); } @@ -116,7 +107,6 @@ Ext4.define('GenotypeAssays.window.PublishResultsWindow', { scope: this, jsonData: { alleleNames: alleleNames, - analysisId: this.analysisId, json: Ext4.encode(this.json), protocolId: protocol }, diff --git a/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysController.java b/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysController.java index eb14a4194..b2b6cf872 100644 --- a/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysController.java +++ b/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysController.java @@ -24,6 +24,7 @@ import org.labkey.api.action.MutatingApiAction; import org.labkey.api.action.SpringActionController; import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.SqlExecutor; import org.labkey.api.data.TableInfo; import org.labkey.api.exp.api.ExpProtocol; @@ -31,6 +32,7 @@ import org.labkey.api.security.RequiresPermission; import org.labkey.api.security.permissions.ReadPermission; import org.labkey.api.security.permissions.UpdatePermission; +import 
org.labkey.api.util.HtmlString; import org.labkey.api.util.Pair; import org.labkey.api.util.URLHelper; import org.labkey.api.view.HtmlView; @@ -53,7 +55,7 @@ public GenotypeAssaysController() } @RequiresPermission(ReadPermission.class) - public class MigrateLegacySSPAction extends ConfirmAction + public static class MigrateLegacySSPAction extends ConfirmAction { @Override public void validateCommand(Object form, Errors errors) @@ -64,11 +66,11 @@ public void validateCommand(Object form, Errors errors) @Override public ModelAndView getConfirmView(Object form, BindException errors) throws Exception { - DbSchema schema = DbSchema.get("SSP_Assay"); + DbSchema schema = DbSchema.get("SSP_Assay", DbSchemaType.Module); if (schema == null) - return new HtmlView("Either the legacy SSP module has not been installed, or it has already been removed"); + return new HtmlView(HtmlString.of("Either the legacy SSP module has not been installed, or it has already been removed")); else - return new HtmlView("This allows an admin to copy any primers stored in the original SSP Assay module into the new genotyping module. Any data has already been copied. Do you want to continue?"); + return new HtmlView(HtmlString.of("This allows an admin to copy any primers stored in the original SSP Assay module into the new genotyping module. Any data has already been copied. 
Do you want to continue?")); } @Override @@ -76,7 +78,7 @@ public boolean handlePost(Object form, BindException errors) throws Exception { try { - DbSchema schema = DbSchema.get("SSP_Assay"); + DbSchema schema = DbSchema.get("SSP_Assay", DbSchemaType.Module); if (schema == null) return true; //module not installed @@ -113,7 +115,7 @@ public URLHelper getSuccessURL(Object form) } @RequiresPermission(UpdatePermission.class) - public class CacheAnalysesAction extends MutatingApiAction + public static class CacheAnalysesAction extends MutatingApiAction { @Override public ApiResponse execute(CacheAnalysesForm form, BindException errors) @@ -133,7 +135,7 @@ public ApiResponse execute(CacheAnalysesForm form, BindException errors) } String[] alleleNames = Arrays.stream(form.getAlleleNames()).map(StringEscapeUtils::unescapeHtml4).toArray(String[]::new); - Pair, List> ret = GenotypeAssaysManager.get().cacheAnalyses(getViewContext(), form.getAnalysisId(), protocol, alleleNames); + Pair, List> ret = GenotypeAssaysManager.get().cacheAnalyses(getViewContext(), protocol, alleleNames); resultProperties.put("runsCreated", ret.first); resultProperties.put("runsDeleted", ret.second); } @@ -159,7 +161,6 @@ public static class CacheAnalysesForm { private String[] _alleleNames; private String _json; - private int _analysisId; private int _protocolId; public String[] getAlleleNames() @@ -191,20 +192,10 @@ public void setJson(String json) { _json = json; } - - public int getAnalysisId() - { - return _analysisId; - } - - public void setAnalysisId(int analysisId) - { - _analysisId = analysisId; - } } @RequiresPermission(UpdatePermission.class) - public class CacheHaplotypesAction extends MutatingApiAction + public static class CacheHaplotypesAction extends MutatingApiAction { @Override public ApiResponse execute(CacheAnalysesForm form, BindException errors) diff --git a/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysManager.java 
b/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysManager.java index a4425d0a0..ba6e35d8e 100644 --- a/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysManager.java +++ b/GenotypeAssays/src/org/labkey/genotypeassays/GenotypeAssaysManager.java @@ -90,7 +90,7 @@ public static GenotypeAssaysManager get() return _instance; } - public Pair, List> cacheAnalyses(final ViewContext ctx, final int analysisId, final ExpProtocol protocol, String[] pks) throws IllegalArgumentException + public Pair, List> cacheAnalyses(final ViewContext ctx, final ExpProtocol protocol, String[] pks) throws IllegalArgumentException { final User u = ctx.getUser(); final List runsCreated = new ArrayList<>(); @@ -125,7 +125,6 @@ public Pair, List> cacheAnalyses(final ViewContext ctx, final i AtomicInteger records = new AtomicInteger(); TableSelector tsAlignments = new TableSelector(tableAlignments, cols.values(), new SimpleFilter(FieldKey.fromString("key"), Arrays.asList(pks), CompareType.IN), null); - tsAlignments.setNamedParameters(Map.of("AnalysisId", analysisId)); tsAlignments.forEach(new Selector.ForEachBlock() { diff --git a/SivStudies/resources/queries/study/demographics/Expanded Vaccine Detail.qview.xml b/SivStudies/resources/queries/study/demographics/Expanded Vaccine Detail.qview.xml index 5734403db..c1c0512ed 100644 --- a/SivStudies/resources/queries/study/demographics/Expanded Vaccine Detail.qview.xml +++ b/SivStudies/resources/queries/study/demographics/Expanded Vaccine Detail.qview.xml @@ -14,6 +14,7 @@ + diff --git a/SivStudies/src/org/labkey/sivstudies/etl/PerformManualIdrStepsTask.java b/SivStudies/src/org/labkey/sivstudies/etl/PerformManualIdrStepsTask.java index f3b976a19..e122d7250 100644 --- a/SivStudies/src/org/labkey/sivstudies/etl/PerformManualIdrStepsTask.java +++ b/SivStudies/src/org/labkey/sivstudies/etl/PerformManualIdrStepsTask.java @@ -294,7 +294,7 @@ public void setContainerUser(ContainerUser containerUser) private void updateJS46() throws 
PipelineJobException { - updateTreatmentRecords("JS46", new SimpleFilter(FieldKey.fromString("treatment"), "SIV - Unknown"), Map.of("treatment", "SIVmac239", "route", "IV")); + updateTreatmentRecords("JS46", new SimpleFilter(FieldKey.fromString("treatment"), "SIV - Unknown"), Map.of("treatment", "SIVmac239", "route", "Rectal", "amount", 1500, "amount_units", "TCID50")); } private void updateTreatmentRecords(String cohortName, SimpleFilter treatmentFilter, final Map additionalProps) throws PipelineJobException diff --git a/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java b/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java index 0f0894906..d6b4953ec 100644 --- a/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java +++ b/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java @@ -464,9 +464,11 @@ public TableInfo getLookupTableInfo() qd.setSql("SELECT\n" + "min(tr.date) as artInitiation,\n" + "CONVERT(TIMESTAMPDIFF('SQL_TSI_DAY', CAST(min(tr.date) AS DATE), CAST(c." + dateColName + " AS DATE)), INTEGER) as daysPostArtInitiation,\n" + + "CONVERT(TIMESTAMPDIFF('SQL_TSI_DAY', CAST(min(tr.date) AS DATE), CAST(c." + dateColName + " AS DATE)) / 7, INTEGER) as weeksPostArtInitiation,\n" + "CONVERT(age_in_months(CAST(min(tr.date) AS DATE), CAST(c." + dateColName + " AS DATE)), FLOAT) as monthsPostArtInitiation,\n" + "max(tr.enddate) as artRelease,\n" + "CONVERT(CASE WHEN max(tr.enddate) IS NULL THEN NULL ELSE TIMESTAMPDIFF('SQL_TSI_DAY', CAST(max(tr.enddate) AS DATE), CAST(c." + dateColName + " AS DATE)) END, INTEGER) as daysPostArtRelease,\n" + + "CONVERT(CASE WHEN max(tr.enddate) IS NULL THEN NULL ELSE TIMESTAMPDIFF('SQL_TSI_DAY', CAST(max(tr.enddate) AS DATE), CAST(c." + dateColName + " AS DATE)) END / 7, INTEGER) as weeksPostArtRelease,\n" + "CONVERT(CASE WHEN max(tr.enddate) IS NULL THEN NULL ELSE age_in_months(CAST(max(tr.enddate) AS DATE), CAST(c." 
+ dateColName + " AS DATE)) END, FLOAT) as monthsPostArtRelease,\n" + "CAST(CASE WHEN CAST(min(tr.date) AS DATE) <= CAST(c." + dateCol.getFieldKey().toString() + " AS DATE) AND CAST(max(coalesce(tr.enddate, now())) AS DATE) >= CAST(c." + dateCol.getFieldKey().toString() + " AS DATE) THEN 'Y' ELSE null END as VARCHAR) as onArt,\n" + "GROUP_CONCAT(DISTINCT tr.treatment) AS artTreatment,\n" + @@ -499,9 +501,11 @@ public TableInfo getLookupTableInfo() ((BaseColumnInfo)ti.getColumn("artRelease")).setLabel("ART Release"); ((BaseColumnInfo)ti.getColumn("daysPostArtInitiation")).setLabel("Days Post-ART Initiation"); + ((BaseColumnInfo)ti.getColumn("weeksPostArtInitiation")).setLabel("Weeks Post-ART Initiation"); ((BaseColumnInfo)ti.getColumn("monthsPostArtInitiation")).setLabel("Months Post-ART Initiation"); ((BaseColumnInfo)ti.getColumn("daysPostArtRelease")).setLabel("Days Post-ART Release"); + ((BaseColumnInfo)ti.getColumn("weeksPostArtRelease")).setLabel("Weeks Post-ART Release"); ((BaseColumnInfo)ti.getColumn("monthsPostArtRelease")).setLabel("Months Post-ART Release"); ((BaseColumnInfo)ti.getColumn("artTreatment")).setLabel("ART Treatment(s)"); diff --git a/mGAP/src/org/labkey/mgap/mGAPModule.java b/mGAP/src/org/labkey/mgap/mGAPModule.java index b702a8308..f46733b0a 100644 --- a/mGAP/src/org/labkey/mgap/mGAPModule.java +++ b/mGAP/src/org/labkey/mgap/mGAPModule.java @@ -110,7 +110,7 @@ public void doStartupAfterSpringConfig(ModuleContext moduleContext) ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Connection, "https://code.jquery.com", "https://*.fontawesome.com"); ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Style, "https://code.jquery.com", "https://www.gstatic.com"); - ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Font, "https://*.fontawesome.com"); + ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), 
Directive.Font, "https://*.fontawesome.com", "https://fonts.googleapis.com"); ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Connection, "https://oss.maxcdn.com"); new PipelineStartup(); diff --git a/mGAP/src/org/labkey/mgap/pipeline/GeographicOriginStep.java b/mGAP/src/org/labkey/mgap/pipeline/GeographicOriginStep.java index b6da9bf1c..34cf46562 100644 --- a/mGAP/src/org/labkey/mgap/pipeline/GeographicOriginStep.java +++ b/mGAP/src/org/labkey/mgap/pipeline/GeographicOriginStep.java @@ -86,6 +86,7 @@ public void complete(PipelineJob job, List inputs, List> toInsert = new ArrayList<>(); try (CSVReader reader = new CSVReader(Readers.getReader(so.getFile()), '\t')) { @@ -120,6 +121,10 @@ public void complete(PipelineJob job, List inputs, List + + + + + + Copy to target @@ -149,7 +155,7 @@ objectid - + diff --git a/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml b/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml index 7136f001a..69afb887b 100644 --- a/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml +++ b/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml @@ -14,6 +14,9 @@ query.sort=-Date& + + Destination(s) + MCC Request Id(s) diff --git a/mcc/resources/queries/study/demographicsMostRecentDeparture.sql b/mcc/resources/queries/study/demographicsMostRecentDeparture.sql index f504e2dee..b3a5b12f6 100644 --- a/mcc/resources/queries/study/demographicsMostRecentDeparture.sql +++ b/mcc/resources/queries/study/demographicsMostRecentDeparture.sql @@ -2,6 +2,7 @@ select T1.Id, max(T1.date) as MostRecentDeparture, + group_concat(distinct t1.destination) as destination, group_concat(distinct t1.mccRequestId) as mccRequestId FROM study.departure T1 diff --git a/mcc/resources/views/mccDataImport.view.xml b/mcc/resources/views/mccDataImport.view.xml index 8418d6d37..63e81ecc3 100644 --- a/mcc/resources/views/mccDataImport.view.xml +++ 
b/mcc/resources/views/mccDataImport.view.xml @@ -19,12 +19,5 @@ - - - - - - - \ No newline at end of file diff --git a/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx b/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx index 916878789..26b4e6ee8 100644 Binary files a/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx and b/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx differ diff --git a/mcc/resources/web/mcc/panel/MccImportPanel.js b/mcc/resources/web/mcc/panel/MccImportPanel.js index e00d01a6e..797829bd9 100644 --- a/mcc/resources/web/mcc/panel/MccImportPanel.js +++ b/mcc/resources/web/mcc/panel/MccImportPanel.js @@ -29,7 +29,7 @@ Ext4.define('MCC.panel.MccImportPanel', { allowBlank: true },{ name: 'Id', - labels: ['Id', 'animal ID', 'AnimalId', 'MarmId', 'Marm Id'], + labels: ['Id', 'animal ID', 'AnimalId', 'MarmId', 'Marm Id', 'Center Id'], allowRowSpan: false, alwaysShow: true, transform: 'animalId', @@ -37,7 +37,7 @@ Ext4.define('MCC.panel.MccImportPanel', { expectInImport: true },{ name: 'alternateIds', - labels: ['Alternate Ids', 'previous Ids'], + labels: ['Alternate Ids', 'AlternateIds', 'previous Ids'], allowRowSpan: false, alwaysShow: true, transform: 'alternateIds', @@ -121,6 +121,13 @@ Ext4.define('MCC.panel.MccImportPanel', { alwaysShow: true, allowBlank: false, transform: 'date' + },{ + name: 'mccAlias', + labels: ['mccAlias', 'MCC ID', 'mccId', 'MCC_ID'], + alwaysShow: true, + allowRowSpan: false, + allowBlank: true, + expectInImport: false },{ name: 'u24_status', labels: ['U24 status'], @@ -131,8 +138,8 @@ Ext4.define('MCC.panel.MccImportPanel', { expectInImport: true },{ name: 'availability', - // NOTE: availalble was a typo in one generation of the input templates: - labels: ['Available to Transfer', 'available to transfer', 'availalble to transfer'], + // NOTE: 'availalble' was a typo in one generation of the input templates; keep it so legacy templates still match: + labels: ['Available to Transfer', 'available to transfer', 'availalble to transfer'], 
allowRowSpan: false, allowBlank: true, transform: 'available', @@ -389,16 +396,95 @@ Ext4.define('MCC.panel.MccImportPanel', { getPanelItems: function(){ return [{ style: 'padding-top: 10px;', - html: 'This page is designed to help import MCC animal-level data. Use the fields below to download the excel template and paste data to import.

' + html: 'This page is designed to help import MCC animal-level data. Use the fields below to download the excel template and paste data to import. The general idea is: 1) Download a blank excel template. This excel workbook contains dropdowns, etc., 2) Use the second button to download a table with the current data for the selected colony. 3) Copy/paste that raw data into the template.

' },{ layout: 'hbox', style: 'margin-bottom: 20px;', items: [{ xtype: 'button', - text: 'Download Template', + text: 'Download Blank Template', border: true, scope: this, href: LABKEY.ActionURL.getContextPath() + '/mcc/exampleData/MCC_Data_Template.xlsx' + },{ + xtype: 'button', + text: 'Download Template Data', + style: 'padding-left: 5px', + border: true, + scope: this, + handler: function(btn){ + var colonyName = btn.up('mcc-mccimportpanel').down('#centerName').getValue() + if (!colonyName) { + Ext4.Msg.alert('Error', 'Must enter the colony name') + return + } + + Ext4.Msg.wait('Loading...'); + var fieldMap = { + 'Id/mccAlias/externalAlias': 'MCC_ID', + 'Id': 'Center Id', + 'alternateIds': 'Previous IDs', + 'colony': 'Current Colony', + 'source': 'Source Colony', + 'gender': 'Sex', + 'birth': 'Birth', + 'calculated_status': 'status', + 'Id/MostRecentDeparture/destination': 'Shipping Destination', + 'Id/MostRecentDeparture/MostRecentDeparture': 'Shipping Date', + 'death': 'Death', + 'deathCause': 'Cause of Death', + 'dam': 'Material ID', + 'sire': 'Paternal ID', + 'Id/MostRecentWeight/MostRecentWeightGrams': 'Weight (g)', + 'Id/MostRecentWeight/MostRecentWeightDate': 'Date of Weight', + 'u24_status': 'U24 Status', + 'Id/mostRecentObservations/availability::observation': 'Availability', + 'Id/mostRecentObservations/current_housing_status::observation': 'Current Housing Status', + 'breeding partner ID': 'Breeding Partner ID', + 'Id/mostRecentObservations/infant_history::observation': 'Infant History', + 'Id/mostRecentObservations/fertility_status::observation': 'Fertility Status', + 'Id/mostRecentObservations/medical_history::observation': 'Medical History' + } + + LABKEY.Query.selectRows({ + schemaName: 'study', + queryName: 'demographics', + columns: Object.keys(fieldMap).join(','), + scope: this, + failure: LDK.Utils.getErrorCallback(), + filterArray: [ + LABKEY.Filter.create('colony', colonyName), + LABKEY.Filter.create('calculated_status', 'Alive') + ], + 
success: function (results) { + Ext4.Msg.hide(); + + const rows = results.rows.map(row => { + const newRow = [] + Object.keys(fieldMap).forEach(key => { + // Always leave these empty: + if (key === 'Id/MostRecentWeight/MostRecentWeightGrams' || key === 'Id/MostRecentWeight/MostRecentWeightDate') { + newRow.push('') + } else if (row[key] !== undefined) { + newRow.push(Ext4.isArray(row[key]) ? row[key].join(',') : row[key]) + } else { + newRow.push('') + } + }) + + return newRow + }) + + LABKEY.Utils.convertToExcel({ + fileName : 'MCC_Import_' + colonyName + '.xlsx', + sheets : [{ + name: 'data', + data: [Object.values(fieldMap)].concat(rows) + }] + }); + } + }); + } }] },{ xtype: 'datefield', @@ -480,7 +566,7 @@ Ext4.define('MCC.panel.MccImportPanel', { LABKEY.Query.selectRows({ schemaName: 'study', queryName: 'demographics', - columns: 'Id,alternateIds,dam,sire,birth,death,colony,objectid,lsid,mccAlias/externalId,Id/death/date,Id/MostRecentDeparture/MostRecentDeparture', + columns: 'Id,alternateIds,dam,sire,birth,death,colony,objectid,lsid,Id/mccAlias/externalAlias,Id/death/date,Id/MostRecentDeparture/MostRecentDeparture', scope: this, failure: LDK.Utils.getErrorCallback(), success: function(results) { @@ -523,13 +609,15 @@ Ext4.define('MCC.panel.MccImportPanel', { row.existingRecord = row.Id && demographicsRecords.allIds.indexOf(row.Id.toLowerCase()) > -1; if (row.existingRecord) { var existingRecord = demographicsRecords.rowMap[row.Id.toLowerCase()]; + existingRecord.mccAlias = existingRecord['Id/mccAlias/externalAlias'] + if (existingRecord.colony !== row.colony) { row.errors.push('Colony does not match existing row: ' + existingRecord.colony); } else { row.objectId = existingRecord.objectid; - var fields = ['birth', 'dam', 'sire', 'source']; + var fields = ['birth', 'dam', 'sire', 'source', 'mccAlias']; for (var idx in fields) { var fn = fields[idx]; @@ -1110,6 +1198,7 @@ Ext4.define('MCC.panel.MccImportPanel', { Ext4.Array.forEach(rawData, function(row){ if 
(row.existingRecord) { + // Note: this was merged with the existing values upstream of this demographicsUpdates.push({ Id: row.Id, date: row.date, diff --git a/mcc/resources/web/mcc/window/MarkShippedWindow.js b/mcc/resources/web/mcc/window/MarkShippedWindow.js index 9cbf6818e..a6c03a09c 100644 --- a/mcc/resources/web/mcc/window/MarkShippedWindow.js +++ b/mcc/resources/web/mcc/window/MarkShippedWindow.js @@ -32,6 +32,11 @@ Ext4.define('MCC.window.MarkShippedWindow', { html: 'This will:
1) Mark the selected animals as shipped from this center
2) Enter a new demographics record in the selected study
3) Preserve the MCC ID for each animal.', border: false, style: 'padding-bottom: 10px;' + },{ + xtype: 'ldk-integerfield', + fieldLabel: 'Request ID', + itemId: 'requestId', + allowBlank: false },{ xtype: 'datefield', fieldLabel: 'Effective Date', @@ -61,7 +66,7 @@ Ext4.define('MCC.window.MarkShippedWindow', { fieldLabel: 'Target Folder', itemId: 'targetFolder', allowBlank: false, - displayField: 'Name', + displayField: 'DisplayName', valueField: 'Path', triggerAction: 'all', queryMode: 'local', @@ -71,9 +76,9 @@ Ext4.define('MCC.window.MarkShippedWindow', { containerPath: ctx.MCCInternalDataContainer, schemaName: 'core', queryName: 'containers', - columns: 'EntityId,Name,Parent,Path', + columns: 'EntityId,DisplayName,Parent,Path', containerFilter: 'CurrentAndSubfolders', - sort: 'Name', + sort: 'DisplayName', autoLoad: true, listeners: { load: function(store) { @@ -111,10 +116,6 @@ Ext4.define('MCC.window.MarkShippedWindow', { xtype: 'displayfield', value: 'Animal ID', width: 150 - },{ - xtype: 'displayfield', - value: 'Request ID', - width: 150 },{ xtype: 'displayfield', value: 'Keep Existing ID?', @@ -129,11 +130,6 @@ Ext4.define('MCC.window.MarkShippedWindow', { fields = fields.concat([{ xtype: 'displayfield', value: animalId, - },{ - xtype: 'ldk-integerfield', - minValue: 1, - itemId: 'requestId-' + animalId, - allowBlank: true },{ xtype: 'checkbox', itemId: 'usePreviousId-' + animalId, @@ -158,7 +154,7 @@ Ext4.define('MCC.window.MarkShippedWindow', { return { layout: { type: 'table', - columns: 4 + columns: 3 }, width: 600, border: false, @@ -183,12 +179,13 @@ Ext4.define('MCC.window.MarkShippedWindow', { var win = btn.up('window'); var lsids = win.rowIds; + var requestId = win.down('#requestId').getValue(); var effectiveDate = win.down('#effectiveDate').getValue(); var centerName = win.down('#centerName').getValue(); var targetFolder = win.down('#targetFolder').getValue(); - if (!effectiveDate || !centerName || !targetFolder) { - Ext4.Msg.alert('Error', 
'Must provide date, center name, and target folder'); + if (!requestId || !effectiveDate || !centerName || !targetFolder) { + Ext4.Msg.alert('Error', 'Must provide request Id, date, center name, and target folder'); return; } @@ -250,6 +247,7 @@ Ext4.define('MCC.window.MarkShippedWindow', { }, doSave: function(win, results, preexistingIdsInTargetFolder){ + var requestId = win.down('#requestId').getValue(); var effectiveDate = win.down('#effectiveDate').getValue(); var centerName = win.down('#centerName').getValue(); var targetFolder = win.down('#targetFolder').getValue(); @@ -259,7 +257,6 @@ Ext4.define('MCC.window.MarkShippedWindow', { var hadError = false; Ext4.Array.forEach(results.rows, function(row){ var effectiveId = win.down('#usePreviousId-' + row.Id).getValue() ? row.Id : win.down('#newId-' + row.Id).getValue(); - var requestId = win.down('#requestId-' + row.Id).getValue(); // This should be checked above, although perhaps case sensitivity could get involved: LDK.Assert.assertNotEmpty('Missing effective ID after query', effectiveId); diff --git a/mcc/src/org/labkey/mcc/MccModule.java b/mcc/src/org/labkey/mcc/MccModule.java index 11292760b..638a94fc6 100644 --- a/mcc/src/org/labkey/mcc/MccModule.java +++ b/mcc/src/org/labkey/mcc/MccModule.java @@ -138,9 +138,10 @@ protected void doStartupAfterSpringConfig(ModuleContext moduleContext) SystemMaintenance.addTask(new MccMaintenanceTask()); - ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Connection, "https://cdn.datatables.net"); - ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Style, "https://cdn.datatables.net"); + ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Connection, "https://cdn.datatables.net", "https://code.jquery.com", "https://*.fontawesome.com", "https://oss.maxcdn.com"); + ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Style, 
"https://cdn.datatables.net", "https://code.jquery.com", "https://www.gstatic.com"); ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Image, "https://cdn.datatables.net"); + ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Font, "https://*.fontawesome.com", "https://fonts.googleapis.com", "https://www.gstatic.com"); } @Override diff --git a/mcc/src/org/labkey/mcc/etl/CacheDemographicsStep.java b/mcc/src/org/labkey/mcc/etl/CacheDemographicsStep.java new file mode 100644 index 000000000..45bf5f4fd --- /dev/null +++ b/mcc/src/org/labkey/mcc/etl/CacheDemographicsStep.java @@ -0,0 +1,51 @@ +package org.labkey.mcc.etl; + +import org.apache.xmlbeans.XmlException; +import org.jetbrains.annotations.NotNull; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.di.TaskRefTask; +import org.labkey.api.ehr.EHRDemographicsService; +import org.labkey.api.pipeline.PipelineJob; +import org.labkey.api.pipeline.PipelineJobException; +import org.labkey.api.pipeline.RecordedActionSet; +import org.labkey.api.query.QueryService; +import org.labkey.api.writer.ContainerUser; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class CacheDemographicsStep implements TaskRefTask +{ + protected ContainerUser _containerUser; + + @Override + public RecordedActionSet run(@NotNull PipelineJob job) throws PipelineJobException + { + TableInfo demographics = QueryService.get().getUserSchema(_containerUser.getUser(), _containerUser.getContainer(), "study").getTable("demographics"); + List ids = new TableSelector(demographics, Collections.singleton("Id"), null, null).getArrayList(String.class); + + EHRDemographicsService.get().getAnimals(_containerUser.getContainer(), ids); + + return new RecordedActionSet(); + } + + @Override + public List getRequiredSettings() + { + return Collections.emptyList(); + } + + @Override + public void 
setSettings(Map settings) throws XmlException + { + + } + + @Override + public void setContainerUser(ContainerUser containerUser) + { + _containerUser = containerUser; + } +} diff --git a/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java b/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java index 9c5caf4f7..bb7b9668d 100644 --- a/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java +++ b/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java @@ -121,6 +121,8 @@ private void testAnimalImportAndTransfer() throws Exception waitAndClick(Ext4Helper.Locators.ext4Button("OK")); sleep(100); + Ext4FieldRef.getForLabel(this, "Request ID").setValue(12345); + Ext4ComboRef.getForLabel(this, "Target Folder").setComboByDisplayValue("Other"); waitAndClick(Ext4Helper.Locators.ext4Button("Submit")); @@ -180,6 +182,7 @@ private void testAnimalImportAndTransfer() throws Exception waitAndClick(Ext4Helper.Locators.ext4Button("OK")); sleep(100); + Ext4FieldRef.getForLabel(this, "Request ID").setValue(12345); Ext4ComboRef.getForLabel(this, "Target Folder").setComboByDisplayValue("Other"); _ext4Helper.queryOne("#usePreviousId-Animal2", Ext4FieldRef.class).setChecked(true); waitAndClick(Ext4Helper.Locators.ext4Button("Submit")); @@ -237,6 +240,7 @@ private void testAnimalImportAndTransfer() throws Exception waitAndClick(Ext4Helper.Locators.ext4Button("OK")); sleep(100); + Ext4FieldRef.getForLabel(this, "Request ID").setValue(12345); Ext4ComboRef.getForLabel(this, "Target Folder").setComboByDisplayValue("Other"); _ext4Helper.queryOne("#newId-12345", Ext4FieldRef.class).setValue("TheNewId"); waitAndClick(Ext4Helper.Locators.ext4Button("Submit")); diff --git a/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java b/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java index f00327a11..77162b833 100644 --- a/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java +++ b/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java @@ 
-39,7 +39,8 @@ public List getParams() put("minValue", 512); }}, 1028), ToolParameterDescriptor.create("localSSD", "Request Nodes With SSD Scratch", "If selected, -C ssdscratch will be added to the submit script, which limits to node with faster SSD scratch space. This might be important for I/O intense jobs.", "checkbox", null, null), - ToolParameterDescriptor.create("gpus", "GPUs", "The number of GPUs requested for this job. If non-zero, the gpu partition will be used.", "ldk-integerfield", null, null) + ToolParameterDescriptor.create("gpus", "GPUs", "The number of GPUs requested for this job. If non-zero, the gpu partition will be used.", "ldk-integerfield", null, null), + ToolParameterDescriptor.create("useExperimentalPartition", "Use RHEL 9.6 Partition", "If selected, jobs will be submitted to the experimental rhel96TESTING partition.", "checkbox", null, null) ); } diff --git a/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java b/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java index 05a526ae6..fe2f8b243 100644 --- a/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java +++ b/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java @@ -285,7 +285,6 @@ private void processAnalysis(int analysisId) dataFilter.addCondition(FieldKey.fromString("percent_from_locus"), getPipelineJob().getLineageThreshold(), CompareType.GT); TableSelector ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_by_lineage"), PageFlowUtil.set("lineages", "percent_from_locus"), dataFilter, null); - ts.setNamedParameters(Map.of("AnalysisId", analysisId)); ts.forEachResults(rs -> { existingData.put(rs.getString(FieldKey.fromString("lineages")), rs.getDouble(FieldKey.fromString("percent_from_locus"))); @@ -337,7 +336,6 @@ private void processAnalysis(int analysisId) filter.addCondition(FieldKey.fromString("percent_from_locus"), 
getPipelineJob().getAlleleGroupThreshold(), CompareType.LT); ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("rowids"), filter, null); - ts.setNamedParameters(Map.of("AnalysisId", analysisId)); List lowFreqRowIdList = ts.getArrayList(String.class); if (!lowFreqRowIdList.isEmpty()) { @@ -373,7 +371,6 @@ private void processAnalysis(int analysisId) filter.addCondition(FieldKey.fromString("loci"), "MHC", CompareType.CONTAINS); ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("rowids"), filter, null); - ts.setNamedParameters(Map.of("AnalysisId", analysisId)); List rowIdList = ts.getArrayList(String.class); if (!rowIdList.isEmpty()) { @@ -392,7 +389,6 @@ private void processAnalysis(int analysisId) SimpleFilter nAlignmentFilter = new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId, CompareType.EQUAL); nAlignmentFilter.addCondition(FieldKey.fromString("nAlignments"), 1, CompareType.GT); ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("rowids"), nAlignmentFilter, null); - ts.setNamedParameters(Map.of("AnalysisId", analysisId)); List redundantAlignmentSets = ts.getArrayList(String.class); if (!redundantAlignmentSets.isEmpty()) { @@ -460,7 +456,6 @@ private void processAnalysis(int analysisId) // verify ending data: final Map endingData = new HashMap<>(); ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_by_lineage"), PageFlowUtil.set("lineages", "percent_from_locus"), dataFilter, null); - ts.setNamedParameters(Map.of("AnalysisId", analysisId)); ts.forEachResults(rs -> { 
endingData.put(rs.getString(FieldKey.fromString("lineages")), rs.getDouble(FieldKey.fromString("percent_from_locus"))); }); @@ -531,7 +526,6 @@ public AlignmentGroupCompare(final int analysisId, Container c, User u) this.analysisId = analysisId; TableSelector ts = new TableSelector(QueryService.get().getUserSchema(u, c, "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("analysis_id", "alleles", "lineages", "totalLineages", "total_reads", "total_forward", "total_reverse", "valid_pairs", "rowids"), new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId), null); - ts.setNamedParameters(Map.of("AnalysisId", analysisId)); ts.forEachResults(rs -> { if (rs.getString(FieldKey.fromString("alleles")) == null) { diff --git a/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java b/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java index 8f57b6aec..592bacd0c 100644 --- a/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java +++ b/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java @@ -214,7 +214,7 @@ public Integer getMaxRequestMemory(PipelineJob job) if (isSequenceSequenceOutputHandlerTask(job)) { - File jobXml = new File(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt"); + File jobXml = FileUtil.appendName(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt"); if (jobXml.exists()) { try (BufferedReader reader = Readers.getReader(jobXml)) @@ -384,7 +384,7 @@ private void possiblyAddHighIO(PipelineJob job, RemoteExecutionEngine engine, } } - private void possiblyAddDisk(PipelineJob job, RemoteExecutionEngine engine, List lines) + private void possiblyAddDisk(PipelineJob job, RemoteExecutionEngine engine, List lines) { Map params = ((HasJobParams) job).getJobParams(); String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.localDisk")); @@ -405,6 
+405,18 @@ private boolean needsGPUs(PipelineJob job) return hasCellBender(job) || StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.gpus")) != null; } + private boolean useExperimentalPartition(PipelineJob job) + { + Map params = ((HasJobParams) job).getJobParams(); + String rawVal = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.useExperimentalPartition")); + if (rawVal == null) + { + return false; + } + + return Boolean.parseBoolean(rawVal); + } + private boolean hasCellBender(PipelineJob job) { if (!isSequenceSequenceOutputHandlerTask(job)) @@ -412,7 +424,7 @@ private boolean hasCellBender(PipelineJob job) return false; } - File jobXml = new File(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt"); + File jobXml = FileUtil.appendName(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt"); if (jobXml.exists()) { try (BufferedReader reader = Readers.getReader(jobXml)) @@ -436,7 +448,7 @@ private boolean hasCellBender(PipelineJob job) return false; } - private void possiblyAddGpus(PipelineJob job, RemoteExecutionEngine engine, List lines) + private void possiblyAddGpus(PipelineJob job, RemoteExecutionEngine engine, List lines) { Map params = ((HasJobParams) job).getJobParams(); String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.gpus")); @@ -457,7 +469,7 @@ private void possiblyAddGpus(PipelineJob job, RemoteExecutionEngine engine, List lines.add("#SBATCH --gres=gpu:" + val); } - private void possiblyAddExclusive(PipelineJob job, RemoteExecutionEngine engine, List lines) + private void possiblyAddExclusive(PipelineJob job, RemoteExecutionEngine engine, List lines) { Map params = ((HasJobParams)job).getJobParams(); String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.useExclusive")); @@ -478,7 +490,7 @@ private void possiblyAddExclusive(PipelineJob job, RemoteExecutionEngine engine, } } - 
private void possiblyAddSSD(PipelineJob job, RemoteExecutionEngine engine, List lines) + private void possiblyAddSSD(PipelineJob job, RemoteExecutionEngine engine, List lines) { Map params = ((HasJobParams)job).getJobParams(); String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.localSSD")); @@ -499,7 +511,7 @@ private void possiblyAddSSD(PipelineJob job, RemoteExecutionEngine engine, List< } } - private void possiblyAddQOS(PipelineJob job, RemoteExecutionEngine engine, List lines) + private void possiblyAddQOS(PipelineJob job, RemoteExecutionEngine engine, List lines) { //first remove existing removeQueueLines(lines); @@ -579,7 +591,16 @@ private void possiblyAddQOS(PipelineJob job, RemoteExecutionEngine engine, List< private String getPartition(PipelineJob job) { - return needsGPUs(job) ? "gpu" : "batch"; + if (needsGPUs(job)) + { + return "gpu"; + } + else if (useExperimentalPartition(job)) + { + return "rhel96TESTING"; + } + + return "batch"; } private Long getFileSize(PipelineJob job) diff --git a/tcrdb/resources/schemas/tcrdb.xml b/tcrdb/resources/schemas/tcrdb.xml index 0df8f0916..a68916e03 100644 --- a/tcrdb/resources/schemas/tcrdb.xml +++ b/tcrdb/resources/schemas/tcrdb.xml @@ -165,7 +165,7 @@ CDR3 With Segments - CDR3 With Productivity + CDR3 With Productivity Cognate CDR3s diff --git a/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java b/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java index b9a86e8db..04db1c39d 100644 --- a/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java +++ b/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java @@ -310,6 +310,8 @@ else if ("Low Counts".equals(hto)) while ((line = reader.readNext()) != null) { idx++; + Set comments = new HashSet<>(); + if (idx == 1) { _log.debug("parsing header, length: " + line.length); @@ -346,7 +348,7 @@ else if ("Low Counts".equals(hto)) if ("False".equalsIgnoreCase(line[headerToIdx.get(HEADER_FIELD.FULL_LENGTH)])) { 
notFullLength++; - continue; + comments.add("Not full length"); } //NOTE: 10x appends "-1" to barcode sequences @@ -435,10 +437,26 @@ else if (discordantBarcodes.contains(barcode)) am.jHit = removeNone(line[headerToIdx.get(HEADER_FIELD.J_GENE)]); am.cHit = removeNone(line[headerToIdx.get(HEADER_FIELD.C_GENE)]); am.cdr3Nt = removeNone(line[headerToIdx.get(HEADER_FIELD.CDR3_NT)]); + if (!comments.isEmpty()) + { + am.comment = StringUtils.join(comments, "\n"); + } } else { am = rows.get(key); + if (!comments.isEmpty()) + { + if (am.comment != null) + { + comments.addAll(Arrays.asList(am.comment.split("\n"))); + am.comment = StringUtils.join(comments, "\n"); + } + else + { + am.comment = StringUtils.join(comments, "\n"); + } + } } uniqueContigNames.add(am.coalescedContigName); @@ -456,7 +474,7 @@ else if (discordantBarcodes.contains(barcode)) _log.info("total rows marked as cells: " + totalCells); _log.info("total clonotype rows without CDR3: " + noCDR3); _log.info("total clonotype rows discarded for no C-gene: " + noCGene); - _log.info("total clonotype rows discarded for not full length: " + notFullLength); + _log.info("total clonotype rows not full length (these are imported): " + notFullLength); _log.info("total clonotype rows discarded for lacking consensus clonotype: " + noConsensusClonotype); _log.info("total clonotype rows skipped for unknown barcocdes: " + totalSkipped + " (" + (NumberFormat.getPercentInstance().format(totalSkipped / (double)totalCells)) + ")"); _log.info("total clonotype rows skipped because they are doublets: " + doubletSkipped + " (" + (NumberFormat.getPercentInstance().format(doubletSkipped / (double)totalCells)) + ")"); @@ -614,6 +632,7 @@ private static class AssayModel private String jHit; private String cHit; private int cdna; + private String comment; private final Set barcodes = new HashSet<>(); private String coalescedContigName; @@ -648,6 +667,7 @@ private Map processRow(AssayModel assayModel, AnalysisModel mode row.put("cdr3", 
assayModel.cdr3); row.put("cdr3_nt", assayModel.cdr3Nt); row.put("count", assayModel.barcodes.size()); + row.put("comment", assayModel.comment); double fraction = (double)assayModel.barcodes.size() / totalCellsBySample.get(assayModel.cdna).size(); row.put("fraction", fraction); @@ -700,7 +720,7 @@ private void saveRun(PipelineJob job, ExpProtocol protocol, AnalysisModel model, JSONObject json = new JSONObject(); json.put("Run", runProps); - File assayTmp = new File(outDir, FileUtil.makeLegalName("10x-assay-upload_" + FileUtil.getTimestamp() + ".txt")); + File assayTmp = FileUtil.appendName(outDir, FileUtil.makeLegalName("10x-assay-upload_" + FileUtil.getTimestamp() + ".txt")); if (assayTmp.exists()) { assayTmp.delete();