diff --git a/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml b/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml
index 7136f001a..69afb887b 100644
--- a/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml
+++ b/mcc/resources/queries/study/demographicsMostRecentDeparture.query.xml
@@ -14,6 +14,9 @@
query.sort=-Date&
+
+ Destination(s)
+
MCC Request Id(s)
diff --git a/mcc/resources/queries/study/demographicsMostRecentDeparture.sql b/mcc/resources/queries/study/demographicsMostRecentDeparture.sql
index f504e2dee..b3a5b12f6 100644
--- a/mcc/resources/queries/study/demographicsMostRecentDeparture.sql
+++ b/mcc/resources/queries/study/demographicsMostRecentDeparture.sql
@@ -2,6 +2,7 @@ select
T1.Id,
max(T1.date) as MostRecentDeparture,
+ group_concat(distinct t1.destination) as destination,
group_concat(distinct t1.mccRequestId) as mccRequestId
FROM study.departure T1
diff --git a/mcc/resources/views/mccDataImport.view.xml b/mcc/resources/views/mccDataImport.view.xml
index 8418d6d37..63e81ecc3 100644
--- a/mcc/resources/views/mccDataImport.view.xml
+++ b/mcc/resources/views/mccDataImport.view.xml
@@ -19,12 +19,5 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx b/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx
index 916878789..26b4e6ee8 100644
Binary files a/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx and b/mcc/resources/web/mcc/exampleData/MCC_Data_Template.xlsx differ
diff --git a/mcc/resources/web/mcc/panel/MccImportPanel.js b/mcc/resources/web/mcc/panel/MccImportPanel.js
index e00d01a6e..797829bd9 100644
--- a/mcc/resources/web/mcc/panel/MccImportPanel.js
+++ b/mcc/resources/web/mcc/panel/MccImportPanel.js
@@ -29,7 +29,7 @@ Ext4.define('MCC.panel.MccImportPanel', {
allowBlank: true
},{
name: 'Id',
- labels: ['Id', 'animal ID', 'AnimalId', 'MarmId', 'Marm Id'],
+ labels: ['Id', 'animal ID', 'AnimalId', 'MarmId', 'Marm Id', 'Center Id'],
allowRowSpan: false,
alwaysShow: true,
transform: 'animalId',
@@ -37,7 +37,7 @@ Ext4.define('MCC.panel.MccImportPanel', {
expectInImport: true
},{
name: 'alternateIds',
- labels: ['Alternate Ids', 'previous Ids'],
+ labels: ['Alternate Ids', 'AlternateIds', 'previous Ids'],
allowRowSpan: false,
alwaysShow: true,
transform: 'alternateIds',
@@ -121,6 +121,13 @@ Ext4.define('MCC.panel.MccImportPanel', {
alwaysShow: true,
allowBlank: false,
transform: 'date'
+ },{
+ name: 'mccAlias',
+ labels: ['mccAlias', 'MCC ID', 'mccId', 'MCC_ID'],
+ alwaysShow: true,
+ allowRowSpan: false,
+ allowBlank: true,
+ expectInImport: false
},{
name: 'u24_status',
labels: ['U24 status'],
@@ -131,8 +138,8 @@ Ext4.define('MCC.panel.MccImportPanel', {
expectInImport: true
},{
name: 'availability',
- // NOTE: availalble was a typo in one generation of the input templates:
- labels: ['Available to Transfer', 'available to transfer', 'availalble to transfer'],
+ // NOTE: 'availalble' was a typo in one generation of the input templates; keep the misspelled label so those older sheets still match:
+ labels: ['Available to Transfer', 'available to transfer', 'availalble to transfer'],
allowRowSpan: false,
allowBlank: true,
transform: 'available',
@@ -389,16 +396,95 @@ Ext4.define('MCC.panel.MccImportPanel', {
getPanelItems: function(){
return [{
style: 'padding-top: 10px;',
- html: 'This page is designed to help import MCC animal-level data. Use the fields below to download the excel template and paste data to import.'
+ html: 'This page is designed to help import MCC animal-level data. Use the fields below to download the excel template and paste data to import. The general idea is: 1) Download a blank excel template. This excel workbook contains dropdowns, etc., 2) Use the second button to download a table with the current data for the selected colony. 3) Copy/paste that raw data into the template.'
},{
layout: 'hbox',
style: 'margin-bottom: 20px;',
items: [{
xtype: 'button',
- text: 'Download Template',
+ text: 'Download Blank Template',
border: true,
scope: this,
href: LABKEY.ActionURL.getContextPath() + '/mcc/exampleData/MCC_Data_Template.xlsx'
+ },{
+ xtype: 'button',
+ text: 'Download Template Data',
+ style: 'padding-left: 5px',
+ border: true,
+ scope: this,
+ handler: function(btn){
+ var colonyName = btn.up('mcc-mccimportpanel').down('#centerName').getValue()
+ if (!colonyName) {
+ Ext4.Msg.alert('Error', 'Must enter the colony name')
+ return
+ }
+
+ Ext4.Msg.wait('Loading...');
+ var fieldMap = {
+ 'Id/mccAlias/externalAlias': 'MCC_ID',
+ 'Id': 'Center Id',
+ 'alternateIds': 'Previous IDs',
+ 'colony': 'Current Colony',
+ 'source': 'Source Colony',
+ 'gender': 'Sex',
+ 'birth': 'Birth',
+ 'calculated_status': 'Status',
+ 'Id/MostRecentDeparture/destination': 'Shipping Destination',
+ 'Id/MostRecentDeparture/MostRecentDeparture': 'Shipping Date',
+ 'death': 'Death',
+ 'deathCause': 'Cause of Death',
+ 'dam': 'Maternal ID',
+ 'sire': 'Paternal ID',
+ 'Id/MostRecentWeight/MostRecentWeightGrams': 'Weight (g)',
+ 'Id/MostRecentWeight/MostRecentWeightDate': 'Date of Weight',
+ 'u24_status': 'U24 Status',
+ 'Id/mostRecentObservations/availability::observation': 'Availability',
+ 'Id/mostRecentObservations/current_housing_status::observation': 'Current Housing Status',
+ 'breeding partner ID': 'Breeding Partner ID', // NOTE(review): key contains spaces — confirm this is a valid study.demographics column name, otherwise selectRows will fail
+ 'Id/mostRecentObservations/infant_history::observation': 'Infant History',
+ 'Id/mostRecentObservations/fertility_status::observation': 'Fertility Status',
+ 'Id/mostRecentObservations/medical_history::observation': 'Medical History'
+ }
+
+ LABKEY.Query.selectRows({
+ schemaName: 'study',
+ queryName: 'demographics',
+ columns: Object.keys(fieldMap).join(','),
+ scope: this,
+ failure: LDK.Utils.getErrorCallback(),
+ filterArray: [
+ LABKEY.Filter.create('colony', colonyName),
+ LABKEY.Filter.create('calculated_status', 'Alive')
+ ],
+ success: function (results) {
+ Ext4.Msg.hide();
+
+ const rows = results.rows.map(row => {
+ const newRow = []
+ Object.keys(fieldMap).forEach(key => {
+ // Always leave these empty:
+ if (key === 'Id/MostRecentWeight/MostRecentWeightGrams' || key === 'Id/MostRecentWeight/MostRecentWeightDate') {
+ newRow.push('')
+ } else if (row[key] !== undefined) {
+ newRow.push(Ext4.isArray(row[key]) ? row[key].join(',') : row[key])
+ } else {
+ newRow.push('')
+ }
+ })
+
+ return newRow
+ })
+
+ LABKEY.Utils.convertToExcel({
+ fileName : 'MCC_Import_' + colonyName + '.xlsx',
+ sheets : [{
+ name: 'data',
+ data: [Object.values(fieldMap)].concat(rows)
+ }]
+ });
+ }
+ });
+ }
}]
},{
xtype: 'datefield',
@@ -480,7 +566,7 @@ Ext4.define('MCC.panel.MccImportPanel', {
LABKEY.Query.selectRows({
schemaName: 'study',
queryName: 'demographics',
- columns: 'Id,alternateIds,dam,sire,birth,death,colony,objectid,lsid,mccAlias/externalId,Id/death/date,Id/MostRecentDeparture/MostRecentDeparture',
+ columns: 'Id,alternateIds,dam,sire,birth,death,colony,objectid,lsid,Id/mccAlias/externalAlias,Id/death/date,Id/MostRecentDeparture/MostRecentDeparture',
scope: this,
failure: LDK.Utils.getErrorCallback(),
success: function(results) {
@@ -523,13 +609,15 @@ Ext4.define('MCC.panel.MccImportPanel', {
row.existingRecord = row.Id && demographicsRecords.allIds.indexOf(row.Id.toLowerCase()) > -1;
if (row.existingRecord) {
var existingRecord = demographicsRecords.rowMap[row.Id.toLowerCase()];
+ existingRecord.mccAlias = existingRecord['Id/mccAlias/externalAlias']
+
if (existingRecord.colony !== row.colony) {
row.errors.push('Colony does not match existing row: ' + existingRecord.colony);
}
else {
row.objectId = existingRecord.objectid;
- var fields = ['birth', 'dam', 'sire', 'source'];
+ var fields = ['birth', 'dam', 'sire', 'source', 'mccAlias'];
for (var idx in fields) {
var fn = fields[idx];
@@ -1110,6 +1198,7 @@ Ext4.define('MCC.panel.MccImportPanel', {
Ext4.Array.forEach(rawData, function(row){
if (row.existingRecord) {
+ // Note: this was merged with the existing values upstream of this
demographicsUpdates.push({
Id: row.Id,
date: row.date,
diff --git a/mcc/resources/web/mcc/window/MarkShippedWindow.js b/mcc/resources/web/mcc/window/MarkShippedWindow.js
index 9cbf6818e..a6c03a09c 100644
--- a/mcc/resources/web/mcc/window/MarkShippedWindow.js
+++ b/mcc/resources/web/mcc/window/MarkShippedWindow.js
@@ -32,6 +32,11 @@ Ext4.define('MCC.window.MarkShippedWindow', {
html: 'This will:
1) Mark the selected animals as shipped from this center
2) Enter a new demographics record in the selected study
3) Preserve the MCC ID for each animal.',
border: false,
style: 'padding-bottom: 10px;'
+ },{
+ xtype: 'ldk-integerfield',
+ fieldLabel: 'Request ID',
+ itemId: 'requestId',
+ allowBlank: false
},{
xtype: 'datefield',
fieldLabel: 'Effective Date',
@@ -61,7 +66,7 @@ Ext4.define('MCC.window.MarkShippedWindow', {
fieldLabel: 'Target Folder',
itemId: 'targetFolder',
allowBlank: false,
- displayField: 'Name',
+ displayField: 'DisplayName',
valueField: 'Path',
triggerAction: 'all',
queryMode: 'local',
@@ -71,9 +76,9 @@ Ext4.define('MCC.window.MarkShippedWindow', {
containerPath: ctx.MCCInternalDataContainer,
schemaName: 'core',
queryName: 'containers',
- columns: 'EntityId,Name,Parent,Path',
+ columns: 'EntityId,DisplayName,Parent,Path',
containerFilter: 'CurrentAndSubfolders',
- sort: 'Name',
+ sort: 'DisplayName',
autoLoad: true,
listeners: {
load: function(store) {
@@ -111,10 +116,6 @@ Ext4.define('MCC.window.MarkShippedWindow', {
xtype: 'displayfield',
value: 'Animal ID',
width: 150
- },{
- xtype: 'displayfield',
- value: 'Request ID',
- width: 150
},{
xtype: 'displayfield',
value: 'Keep Existing ID?',
@@ -129,11 +130,6 @@ Ext4.define('MCC.window.MarkShippedWindow', {
fields = fields.concat([{
xtype: 'displayfield',
value: animalId,
- },{
- xtype: 'ldk-integerfield',
- minValue: 1,
- itemId: 'requestId-' + animalId,
- allowBlank: true
},{
xtype: 'checkbox',
itemId: 'usePreviousId-' + animalId,
@@ -158,7 +154,7 @@ Ext4.define('MCC.window.MarkShippedWindow', {
return {
layout: {
type: 'table',
- columns: 4
+ columns: 3
},
width: 600,
border: false,
@@ -183,12 +179,13 @@ Ext4.define('MCC.window.MarkShippedWindow', {
var win = btn.up('window');
var lsids = win.rowIds;
+ var requestId = win.down('#requestId').getValue();
var effectiveDate = win.down('#effectiveDate').getValue();
var centerName = win.down('#centerName').getValue();
var targetFolder = win.down('#targetFolder').getValue();
- if (!effectiveDate || !centerName || !targetFolder) {
- Ext4.Msg.alert('Error', 'Must provide date, center name, and target folder');
+ if (!requestId || !effectiveDate || !centerName || !targetFolder) {
+ Ext4.Msg.alert('Error', 'Must provide request Id, date, center name, and target folder');
return;
}
@@ -250,6 +247,7 @@ Ext4.define('MCC.window.MarkShippedWindow', {
},
doSave: function(win, results, preexistingIdsInTargetFolder){
+ var requestId = win.down('#requestId').getValue();
var effectiveDate = win.down('#effectiveDate').getValue();
var centerName = win.down('#centerName').getValue();
var targetFolder = win.down('#targetFolder').getValue();
@@ -259,7 +257,6 @@ Ext4.define('MCC.window.MarkShippedWindow', {
var hadError = false;
Ext4.Array.forEach(results.rows, function(row){
var effectiveId = win.down('#usePreviousId-' + row.Id).getValue() ? row.Id : win.down('#newId-' + row.Id).getValue();
- var requestId = win.down('#requestId-' + row.Id).getValue();
// This should be checked above, although perhaps case sensitivity could get involved:
LDK.Assert.assertNotEmpty('Missing effective ID after query', effectiveId);
diff --git a/mcc/src/org/labkey/mcc/MccModule.java b/mcc/src/org/labkey/mcc/MccModule.java
index 11292760b..638a94fc6 100644
--- a/mcc/src/org/labkey/mcc/MccModule.java
+++ b/mcc/src/org/labkey/mcc/MccModule.java
@@ -138,9 +138,10 @@ protected void doStartupAfterSpringConfig(ModuleContext moduleContext)
SystemMaintenance.addTask(new MccMaintenanceTask());
- ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Connection, "https://cdn.datatables.net");
- ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Style, "https://cdn.datatables.net");
+ ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Connection, "https://cdn.datatables.net", "https://code.jquery.com", "https://*.fontawesome.com", "https://oss.maxcdn.com");
+ ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Style, "https://cdn.datatables.net", "https://code.jquery.com", "https://www.gstatic.com");
ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Image, "https://cdn.datatables.net");
+ ContentSecurityPolicyFilter.registerAllowedSources(this.getClass().getName(), Directive.Font, "https://*.fontawesome.com", "https://fonts.googleapis.com", "https://www.gstatic.com");
}
@Override
diff --git a/mcc/src/org/labkey/mcc/etl/CacheDemographicsStep.java b/mcc/src/org/labkey/mcc/etl/CacheDemographicsStep.java
new file mode 100644
index 000000000..45bf5f4fd
--- /dev/null
+++ b/mcc/src/org/labkey/mcc/etl/CacheDemographicsStep.java
@@ -0,0 +1,51 @@
+package org.labkey.mcc.etl;
+
+import org.apache.xmlbeans.XmlException;
+import org.jetbrains.annotations.NotNull;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.data.TableSelector;
+import org.labkey.api.di.TaskRefTask;
+import org.labkey.api.ehr.EHRDemographicsService;
+import org.labkey.api.pipeline.PipelineJob;
+import org.labkey.api.pipeline.PipelineJobException;
+import org.labkey.api.pipeline.RecordedActionSet;
+import org.labkey.api.query.QueryService;
+import org.labkey.api.writer.ContainerUser;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+public class CacheDemographicsStep implements TaskRefTask
+{
+ protected ContainerUser _containerUser;
+
+ @Override
+ public RecordedActionSet run(@NotNull PipelineJob job) throws PipelineJobException
+ {
+ TableInfo demographics = QueryService.get().getUserSchema(_containerUser.getUser(), _containerUser.getContainer(), "study").getTable("demographics");
+ List ids = new TableSelector(demographics, Collections.singleton("Id"), null, null).getArrayList(String.class);
+
+ EHRDemographicsService.get().getAnimals(_containerUser.getContainer(), ids);
+
+ return new RecordedActionSet();
+ }
+
+ @Override
+ public List getRequiredSettings()
+ {
+ return Collections.emptyList();
+ }
+
+ @Override
+ public void setSettings(Map settings) throws XmlException
+ {
+
+ }
+
+ @Override
+ public void setContainerUser(ContainerUser containerUser)
+ {
+ _containerUser = containerUser;
+ }
+}
diff --git a/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java b/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java
index 9c5caf4f7..bb7b9668d 100644
--- a/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java
+++ b/mcc/test/src/org/labkey/test/tests/mcc/MccTest.java
@@ -121,6 +121,8 @@ private void testAnimalImportAndTransfer() throws Exception
waitAndClick(Ext4Helper.Locators.ext4Button("OK"));
sleep(100);
+ Ext4FieldRef.getForLabel(this, "Request ID").setValue(12345);
+
Ext4ComboRef.getForLabel(this, "Target Folder").setComboByDisplayValue("Other");
waitAndClick(Ext4Helper.Locators.ext4Button("Submit"));
@@ -180,6 +182,7 @@ private void testAnimalImportAndTransfer() throws Exception
waitAndClick(Ext4Helper.Locators.ext4Button("OK"));
sleep(100);
+ Ext4FieldRef.getForLabel(this, "Request ID").setValue(12345);
Ext4ComboRef.getForLabel(this, "Target Folder").setComboByDisplayValue("Other");
_ext4Helper.queryOne("#usePreviousId-Animal2", Ext4FieldRef.class).setChecked(true);
waitAndClick(Ext4Helper.Locators.ext4Button("Submit"));
@@ -237,6 +240,7 @@ private void testAnimalImportAndTransfer() throws Exception
waitAndClick(Ext4Helper.Locators.ext4Button("OK"));
sleep(100);
+ Ext4FieldRef.getForLabel(this, "Request ID").setValue(12345);
Ext4ComboRef.getForLabel(this, "Target Folder").setComboByDisplayValue("Other");
_ext4Helper.queryOne("#newId-12345", Ext4FieldRef.class).setValue("TheNewId");
waitAndClick(Ext4Helper.Locators.ext4Button("Submit"));
diff --git a/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java b/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java
index f00327a11..77162b833 100644
--- a/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java
+++ b/primeseq/src/org/labkey/primeseq/pipeline/ExacloudResourceSettings.java
@@ -39,7 +39,8 @@ public List getParams()
put("minValue", 512);
}}, 1028),
ToolParameterDescriptor.create("localSSD", "Request Nodes With SSD Scratch", "If selected, -C ssdscratch will be added to the submit script, which limits to node with faster SSD scratch space. This might be important for I/O intense jobs.", "checkbox", null, null),
- ToolParameterDescriptor.create("gpus", "GPUs", "The number of GPUs requested for this job. If non-zero, the gpu partition will be used.", "ldk-integerfield", null, null)
+ ToolParameterDescriptor.create("gpus", "GPUs", "The number of GPUs requested for this job. If non-zero, the gpu partition will be used.", "ldk-integerfield", null, null),
+ ToolParameterDescriptor.create("useExperimentalPartition", "Use RHEL 9.6 Partition", "If selected, jobs will be submitted to the experimental rhel96TESTING partition.", "checkbox", null, null)
);
}
diff --git a/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java b/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java
index 05a526ae6..fe2f8b243 100644
--- a/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java
+++ b/primeseq/src/org/labkey/primeseq/pipeline/MhcCleanupPipelineJob.java
@@ -285,7 +285,6 @@ private void processAnalysis(int analysisId)
dataFilter.addCondition(FieldKey.fromString("percent_from_locus"), getPipelineJob().getLineageThreshold(), CompareType.GT);
TableSelector ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_by_lineage"), PageFlowUtil.set("lineages", "percent_from_locus"), dataFilter, null);
- ts.setNamedParameters(Map.of("AnalysisId", analysisId));
ts.forEachResults(rs -> {
existingData.put(rs.getString(FieldKey.fromString("lineages")), rs.getDouble(FieldKey.fromString("percent_from_locus")));
@@ -337,7 +336,6 @@ private void processAnalysis(int analysisId)
filter.addCondition(FieldKey.fromString("percent_from_locus"), getPipelineJob().getAlleleGroupThreshold(), CompareType.LT);
ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("rowids"), filter, null);
- ts.setNamedParameters(Map.of("AnalysisId", analysisId));
List lowFreqRowIdList = ts.getArrayList(String.class);
if (!lowFreqRowIdList.isEmpty())
{
@@ -373,7 +371,6 @@ private void processAnalysis(int analysisId)
filter.addCondition(FieldKey.fromString("loci"), "MHC", CompareType.CONTAINS);
ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("rowids"), filter, null);
- ts.setNamedParameters(Map.of("AnalysisId", analysisId));
List rowIdList = ts.getArrayList(String.class);
if (!rowIdList.isEmpty())
{
@@ -392,7 +389,6 @@ private void processAnalysis(int analysisId)
SimpleFilter nAlignmentFilter = new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId, CompareType.EQUAL);
nAlignmentFilter.addCondition(FieldKey.fromString("nAlignments"), 1, CompareType.GT);
ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("rowids"), nAlignmentFilter, null);
- ts.setNamedParameters(Map.of("AnalysisId", analysisId));
List redundantAlignmentSets = ts.getArrayList(String.class);
if (!redundantAlignmentSets.isEmpty())
{
@@ -460,7 +456,6 @@ private void processAnalysis(int analysisId)
// verify ending data:
final Map endingData = new HashMap<>();
ts = new TableSelector(QueryService.get().getUserSchema(getJob().getUser(), getJob().getContainer(), "sequenceanalysis").getTable("alignment_summary_by_lineage"), PageFlowUtil.set("lineages", "percent_from_locus"), dataFilter, null);
- ts.setNamedParameters(Map.of("AnalysisId", analysisId));
ts.forEachResults(rs -> {
endingData.put(rs.getString(FieldKey.fromString("lineages")), rs.getDouble(FieldKey.fromString("percent_from_locus")));
});
@@ -531,7 +526,6 @@ public AlignmentGroupCompare(final int analysisId, Container c, User u)
this.analysisId = analysisId;
TableSelector ts = new TableSelector(QueryService.get().getUserSchema(u, c, "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("analysis_id", "alleles", "lineages", "totalLineages", "total_reads", "total_forward", "total_reverse", "valid_pairs", "rowids"), new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId), null);
- ts.setNamedParameters(Map.of("AnalysisId", analysisId));
ts.forEachResults(rs -> {
if (rs.getString(FieldKey.fromString("alleles")) == null)
{
diff --git a/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java b/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java
index 8f57b6aec..592bacd0c 100644
--- a/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java
+++ b/primeseq/src/org/labkey/primeseq/pipeline/SequenceJobResourceAllocator.java
@@ -214,7 +214,7 @@ public Integer getMaxRequestMemory(PipelineJob job)
if (isSequenceSequenceOutputHandlerTask(job))
{
- File jobXml = new File(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt");
+ File jobXml = FileUtil.appendName(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt");
if (jobXml.exists())
{
try (BufferedReader reader = Readers.getReader(jobXml))
@@ -384,7 +384,7 @@ private void possiblyAddHighIO(PipelineJob job, RemoteExecutionEngine> engine,
}
}
- private void possiblyAddDisk(PipelineJob job, RemoteExecutionEngine engine, List lines)
+ private void possiblyAddDisk(PipelineJob job, RemoteExecutionEngine> engine, List lines)
{
Map params = ((HasJobParams) job).getJobParams();
String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.localDisk"));
@@ -405,6 +405,18 @@ private boolean needsGPUs(PipelineJob job)
return hasCellBender(job) || StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.gpus")) != null;
}
+ private boolean useExperimentalPartition(PipelineJob job)
+ {
+ Map params = ((HasJobParams) job).getJobParams();
+ String rawVal = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.useExperimentalPartition"));
+ if (rawVal == null)
+ {
+ return false;
+ }
+
+ return Boolean.parseBoolean(rawVal);
+ }
+
private boolean hasCellBender(PipelineJob job)
{
if (!isSequenceSequenceOutputHandlerTask(job))
@@ -412,7 +424,7 @@ private boolean hasCellBender(PipelineJob job)
return false;
}
- File jobXml = new File(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt");
+ File jobXml = FileUtil.appendName(job.getLogFile().getParentFile(), FileUtil.getBaseName(job.getLogFile()) + ".job.json.txt");
if (jobXml.exists())
{
try (BufferedReader reader = Readers.getReader(jobXml))
@@ -436,7 +448,7 @@ private boolean hasCellBender(PipelineJob job)
return false;
}
- private void possiblyAddGpus(PipelineJob job, RemoteExecutionEngine engine, List lines)
+ private void possiblyAddGpus(PipelineJob job, RemoteExecutionEngine> engine, List lines)
{
Map params = ((HasJobParams) job).getJobParams();
String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.gpus"));
@@ -457,7 +469,7 @@ private void possiblyAddGpus(PipelineJob job, RemoteExecutionEngine engine, List
lines.add("#SBATCH --gres=gpu:" + val);
}
- private void possiblyAddExclusive(PipelineJob job, RemoteExecutionEngine engine, List lines)
+ private void possiblyAddExclusive(PipelineJob job, RemoteExecutionEngine> engine, List lines)
{
Map params = ((HasJobParams)job).getJobParams();
String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.useExclusive"));
@@ -478,7 +490,7 @@ private void possiblyAddExclusive(PipelineJob job, RemoteExecutionEngine engine,
}
}
- private void possiblyAddSSD(PipelineJob job, RemoteExecutionEngine engine, List lines)
+ private void possiblyAddSSD(PipelineJob job, RemoteExecutionEngine> engine, List lines)
{
Map params = ((HasJobParams)job).getJobParams();
String val = StringUtils.trimToNull(params.get("resourceSettings.resourceSettings.localSSD"));
@@ -499,7 +511,7 @@ private void possiblyAddSSD(PipelineJob job, RemoteExecutionEngine engine, List<
}
}
- private void possiblyAddQOS(PipelineJob job, RemoteExecutionEngine engine, List lines)
+ private void possiblyAddQOS(PipelineJob job, RemoteExecutionEngine> engine, List lines)
{
//first remove existing
removeQueueLines(lines);
@@ -579,7 +591,16 @@ private void possiblyAddQOS(PipelineJob job, RemoteExecutionEngine engine, List<
private String getPartition(PipelineJob job)
{
- return needsGPUs(job) ? "gpu" : "batch";
+ if (needsGPUs(job))
+ {
+ return "gpu";
+ }
+ else if (useExperimentalPartition(job))
+ {
+ return "rhel96TESTING";
+ }
+
+ return "batch";
}
private Long getFileSize(PipelineJob job)
diff --git a/tcrdb/resources/schemas/tcrdb.xml b/tcrdb/resources/schemas/tcrdb.xml
index 0df8f0916..a68916e03 100644
--- a/tcrdb/resources/schemas/tcrdb.xml
+++ b/tcrdb/resources/schemas/tcrdb.xml
@@ -165,7 +165,7 @@
CDR3 With Segments
- CDR3 With Productivity
+ CDR3 With Productivity
Cognate CDR3s
diff --git a/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java b/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java
index b9a86e8db..04db1c39d 100644
--- a/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java
+++ b/tcrdb/src/org/labkey/tcrdb/pipeline/CellRangerVDJUtils.java
@@ -310,6 +310,8 @@ else if ("Low Counts".equals(hto))
while ((line = reader.readNext()) != null)
{
idx++;
+ Set comments = new HashSet<>();
+
if (idx == 1)
{
_log.debug("parsing header, length: " + line.length);
@@ -346,7 +348,7 @@ else if ("Low Counts".equals(hto))
if ("False".equalsIgnoreCase(line[headerToIdx.get(HEADER_FIELD.FULL_LENGTH)]))
{
notFullLength++;
- continue;
+ comments.add("Not full length");
}
//NOTE: 10x appends "-1" to barcode sequences
@@ -435,10 +437,26 @@ else if (discordantBarcodes.contains(barcode))
am.jHit = removeNone(line[headerToIdx.get(HEADER_FIELD.J_GENE)]);
am.cHit = removeNone(line[headerToIdx.get(HEADER_FIELD.C_GENE)]);
am.cdr3Nt = removeNone(line[headerToIdx.get(HEADER_FIELD.CDR3_NT)]);
+ if (!comments.isEmpty())
+ {
+ am.comment = StringUtils.join(comments, "\n");
+ }
}
else
{
am = rows.get(key);
+ if (!comments.isEmpty())
+ {
+ if (am.comment != null)
+ {
+ comments.addAll(Arrays.asList(am.comment.split("\n")));
+ am.comment = StringUtils.join(comments, "\n");
+ }
+ else
+ {
+ am.comment = StringUtils.join(comments, "\n");
+ }
+ }
}
uniqueContigNames.add(am.coalescedContigName);
@@ -456,7 +474,7 @@ else if (discordantBarcodes.contains(barcode))
_log.info("total rows marked as cells: " + totalCells);
_log.info("total clonotype rows without CDR3: " + noCDR3);
_log.info("total clonotype rows discarded for no C-gene: " + noCGene);
- _log.info("total clonotype rows discarded for not full length: " + notFullLength);
+ _log.info("total clonotype rows not full length (these are imported): " + notFullLength);
_log.info("total clonotype rows discarded for lacking consensus clonotype: " + noConsensusClonotype);
_log.info("total clonotype rows skipped for unknown barcocdes: " + totalSkipped + " (" + (NumberFormat.getPercentInstance().format(totalSkipped / (double)totalCells)) + ")");
_log.info("total clonotype rows skipped because they are doublets: " + doubletSkipped + " (" + (NumberFormat.getPercentInstance().format(doubletSkipped / (double)totalCells)) + ")");
@@ -614,6 +632,7 @@ private static class AssayModel
private String jHit;
private String cHit;
private int cdna;
+ private String comment;
private final Set barcodes = new HashSet<>();
private String coalescedContigName;
@@ -648,6 +667,7 @@ private Map processRow(AssayModel assayModel, AnalysisModel mode
row.put("cdr3", assayModel.cdr3);
row.put("cdr3_nt", assayModel.cdr3Nt);
row.put("count", assayModel.barcodes.size());
+ row.put("comment", assayModel.comment);
double fraction = (double)assayModel.barcodes.size() / totalCellsBySample.get(assayModel.cdna).size();
row.put("fraction", fraction);
@@ -700,7 +720,7 @@ private void saveRun(PipelineJob job, ExpProtocol protocol, AnalysisModel model,
JSONObject json = new JSONObject();
json.put("Run", runProps);
- File assayTmp = new File(outDir, FileUtil.makeLegalName("10x-assay-upload_" + FileUtil.getTimestamp() + ".txt"));
+ File assayTmp = FileUtil.appendName(outDir, FileUtil.makeLegalName("10x-assay-upload_" + FileUtil.getTimestamp() + ".txt"));
if (assayTmp.exists())
{
assayTmp.delete();