diff --git a/Build/Src/FwBuildTasks/Substitute.cs b/Build/Src/FwBuildTasks/Substitute.cs
index c9516f6ac7..a13517b32f 100644
--- a/Build/Src/FwBuildTasks/Substitute.cs
+++ b/Build/Src/FwBuildTasks/Substitute.cs
@@ -77,19 +77,19 @@ public override bool Execute()
var numberOfDays = Convert.ToInt32(Math.Truncate(DateTime.Now.ToOADate())).ToString();
fileContents = regex.Replace(fileContents, numberOfDays);
- // Jenkins builds should set the BUILD_NUMBER in the environment
- var buildNumber = Environment.GetEnvironmentVariable("BUILD_NUMBER");
+ // GHA builds set the RELEASE_BASE_BUILD_NUMBER in the environment
+ var buildNumber = Environment.GetEnvironmentVariable("RELEASE_BASE_BUILD_NUMBER");
if (string.IsNullOrEmpty(buildNumber))
{
- // fall back to number of days if no BUILD_NUMBER is in the environment
+ // fall back to number of days if no RELEASE_BASE_BUILD_NUMBER is in the environment
buildNumber = numberOfDays;
}
regex = new Regex("\\$BUILDNUMBER");
fileContents = regex.Replace(fileContents, buildNumber);
- // If BaseBuildish is set, this is a patch build: use BaseBuildish;
- // otherwise, this is a base build: use BUILD_NUMBER
- var baseBuildNumber = Environment.GetEnvironmentVariable("BaseBuildish");
+ // If BASE_BUILD_NUMBER is set, this is a patch build: use BASE_BUILD_NUMBER;
+ // otherwise, this is a base build: use buildNumber
+ var baseBuildNumber = Environment.GetEnvironmentVariable("BASE_BUILD_NUMBER");
if (string.IsNullOrEmpty(baseBuildNumber))
{
baseBuildNumber = buildNumber;
diff --git a/Build/mkall.targets b/Build/mkall.targets
index 10a027b6f7..38c0cdf470 100644
--- a/Build/mkall.targets
+++ b/Build/mkall.targets
@@ -329,6 +329,7 @@
+
@@ -519,6 +520,7 @@
$(PalasoNugetVersion)lib/net462/*.*$(UsingLocalLibraryBuild)
$(PalasoNugetVersion)lib/net462/*.*$(UsingLocalLibraryBuild)
9.0.0lib/net462/*.*true
+ 4.5.1lib/net461/*.*true
4.5.4lib/net461/*.*true
4.6.0lib/netstandard2.0/*.*true
7.0.0lib/net461/*.*
diff --git a/Build/nuget-common/packages.config b/Build/nuget-common/packages.config
index c20fa088b4..98f9a02921 100644
--- a/Build/nuget-common/packages.config
+++ b/Build/nuget-common/packages.config
@@ -78,6 +78,7 @@
+
diff --git a/Src/LexText/Interlinear/BIRDInterlinearImporter.cs b/Src/LexText/Interlinear/BIRDInterlinearImporter.cs
index 9275ec6b99..0ae2edcc1b 100644
--- a/Src/LexText/Interlinear/BIRDInterlinearImporter.cs
+++ b/Src/LexText/Interlinear/BIRDInterlinearImporter.cs
@@ -145,16 +145,19 @@ private static bool PopulateTextFromBIRDDoc(ref LCModel.IText newText, TextCreat
{
//If the text of the phrase was not given in the document build it from the words.
if (!textInFile)
- {
- UpdatePhraseTextForWordItems(wsFactory, ref phraseText, word, ref lastWasWord, space);
- }
- AddWordToSegment(newSegment, word);
+ UpdatePhraseTextForWordItems(wsFactory, ref phraseText, word,
+ ref lastWasWord, space);
+ var writingSystemForText =
+ TsStringUtils.IsNullOrEmpty(phraseText) ? newText.ContentsOA.MainWritingSystem : phraseText.get_WritingSystem(0);
+ AddWordToSegment(newSegment, word, writingSystemForText);
}
}
}
+
UpdateParagraphTextForPhrase(newTextPara, ref offset, phraseText);
}
}
+
return true;
}
@@ -296,10 +299,9 @@ private static bool MergeTextWithBIRDDoc(ref LCModel.IText newText, TextCreation
{
//If the text of the phrase was not found in a "txt" item for this segment then build it from the words.
if (!textInFile)
- {
- UpdatePhraseTextForWordItems(wsFactory, ref phraseText, word, ref lastWasWord, space);
- }
- MergeWordToSegment(newSegment, word);
+ UpdatePhraseTextForWordItems(wsFactory, ref phraseText, word,
+ ref lastWasWord, space);
+ MergeWordToSegment(newSegment, word, newContents.MainWritingSystem);
}
}
UpdateParagraphTextForPhrase(newTextPara, ref offset, phraseText);
@@ -477,8 +479,8 @@ private static ICmPerson FindOrCreateSpeaker(string speaker, LcmCache cache)
//find and return a person in this project whose name matches the speaker
foreach (var person in cache.LanguageProject.PeopleOA.PossibilitiesOS)
{
- if (person.Name.BestVernacularAnalysisAlternative.Text.Equals(speaker))
+ if (person.Name.BestVernacularAnalysisAlternative.Text.Normalize().Equals(speaker.Normalize()))
{
return (ICmPerson)person;
}
}
@@ -495,26 +497,27 @@ private static ICmPerson FindOrCreateSpeaker(string speaker, LcmCache cache)
return newPerson;
}
- private static void MergeWordToSegment(ISegment newSegment, Word word)
+ private static void MergeWordToSegment(ISegment newSegment, Word word, int mainWritingSystem)
{
- if(!String.IsNullOrEmpty(word.guid))
+ if (!string.IsNullOrEmpty(word.guid))
{
ICmObject repoObj;
- newSegment.Cache.ServiceLocator.ObjectRepository.TryGetObject(new Guid(word.guid), out repoObj);
- IAnalysis modelWord = repoObj as IAnalysis;
- if(modelWord != null)
+ newSegment.Cache.ServiceLocator.ObjectRepository.TryGetObject(new Guid(word.guid),
+ out repoObj);
+ var modelWord = repoObj as IAnalysis;
+ if (modelWord != null)
{
UpgradeToWordGloss(word, ref modelWord);
newSegment.AnalysesRS.Add(modelWord);
}
else
{
- AddWordToSegment(newSegment, word);
+ AddWordToSegment(newSegment, word, mainWritingSystem);
}
}
else
{
- AddWordToSegment(newSegment, word);
+ AddWordToSegment(newSegment, word, mainWritingSystem);
}
}
@@ -705,11 +708,12 @@ private static ILgWritingSystem SafelyGetWritingSystem(LcmCache cache, ILgWritin
return writingSystem;
}
- private static void AddWordToSegment(ISegment newSegment, Word word)
+ private static void AddWordToSegment(ISegment newSegment, Word word,
+ int mainWritingSystem)
{
//use the items under the word to determine what kind of thing to add to the segment
var cache = newSegment.Cache;
- IAnalysis analysis = CreateWordAnalysisStack(cache, word);
+ var analysis = CreateWordformWithWfiAnalysis(cache, word, mainWritingSystem);
// Add to segment
if (analysis != null)
@@ -718,89 +722,48 @@ private static void AddWordToSegment(ISegment newSegment, Word word)
}
}
- private static IAnalysis CreateWordAnalysisStack(LcmCache cache, Word word)
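+ /// <summary>
+ /// Create or reuse a wordform (with its analysis and morph bundles) for the imported word,
+ /// matching against existing project data on the given main writing system.
+ /// </summary>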
+ private static IAnalysis CreateWordformWithWfiAnalysis(LcmCache cache, Word word, int mainWritingSystem)
{
- if (word.Items == null || word.Items.Length <= 0) return null;
- IAnalysis analysis = null;
+ if (FindOrCreateWfiAnalysis(cache, word, mainWritingSystem, out var matchingWf)
+ || matchingWf is IPunctuationForm)
+ {
+ return matchingWf;
+ }
+ IAnalysis wordForm = matchingWf;
var wsFact = cache.WritingSystemFactory;
ILgWritingSystem wsMainVernWs = null;
IWfiMorphBundle bundle = null;
- foreach (var wordItem in word.Items)
- {
- if (wordItem.Value == null)
- continue;
- ITsString wordForm = null;
- switch (wordItem.type)
- {
- case "txt":
- wsMainVernWs = GetWsEngine(wsFact, wordItem.lang);
- wordForm = TsStringUtils.MakeString(wordItem.Value, wsMainVernWs.Handle);
- analysis = WfiWordformServices.FindOrCreateWordform(cache, wordForm);
- break;
- case "punct":
- wordForm = TsStringUtils.MakeString(wordItem.Value,
- GetWsEngine(wsFact, wordItem.lang).Handle);
- analysis = WfiWordformServices.FindOrCreatePunctuationform(cache, wordForm);
- break;
- }
- if (wordForm != null)
- break;
- }
-
- // now add any alternative word forms. (overwrite any existing)
- if (analysis != null && analysis.HasWordform)
- {
- AddAlternativeWssToWordform(analysis, word, wsMainVernWs);
- }
-
- if (analysis != null)
- {
- UpgradeToWordGloss(word, ref analysis);
- }
+ if (wordForm != null)
+ UpgradeToWordGloss(word, ref wordForm);
else
- {
// There was an invalid analysis in the file. We can't do anything with it.
return null;
- }
// Fill in morphemes, lex. entries, lex. gloss, and lex.gram.info
if (word.morphemes != null && word.morphemes.morphs.Length > 0)
{
- ILexEntryRepository lex_entry_repo = cache.ServiceLocator.GetInstance();
- IMoMorphSynAnalysisRepository msa_repo = cache.ServiceLocator.GetInstance();
- int morphIdx = 0;
+ var lex_entry_repo = cache.ServiceLocator.GetInstance();
+ var msa_repo = cache.ServiceLocator.GetInstance();
foreach (var morpheme in word.morphemes.morphs)
{
var itemDict = new Dictionary>();
- if (analysis.Analysis == null)
- {
+ if (wordForm.Analysis == null)
break;
- }
- foreach (item item in morpheme.items)
- {
+ foreach (var item in morpheme.items)
itemDict[item.type] = new Tuple(item.lang, item.Value);
- }
if (itemDict.ContainsKey("txt")) // Morphemes
{
- int ws = GetWsEngine(wsFact, itemDict["txt"].Item1).Handle;
+ var ws = GetWsEngine(wsFact, itemDict["txt"].Item1).Handle;
var morphForm = itemDict["txt"].Item2;
- ITsString wf = TsStringUtils.MakeString(morphForm, ws);
+ var wf = TsStringUtils.MakeString(morphForm, ws);
- // If we already have a bundle use that one
- bundle = analysis.Analysis.MorphBundlesOS.ElementAtOrDefault(morphIdx);
- if (bundle == null || bundle.Form.get_String(ws).Text != morphForm)
- {
- // Otherwise create a new bundle and add it to analysis
- bundle = cache.ServiceLocator.GetInstance().Create();
- if (analysis.Analysis.MorphBundlesOS.Count >= word.morphemes.morphs.Length)
- {
- analysis.Analysis.MorphBundlesOS.RemoveAt(morphIdx);
- }
- analysis.Analysis.MorphBundlesOS.Insert(morphIdx, bundle);
- }
+ // Create a new bundle and add it to the analysis
+ bundle = cache.ServiceLocator.GetInstance()
+ .Create();
+ wordForm.Analysis.MorphBundlesOS.Add(bundle);
bundle.Form.set_String(ws, wf);
}
@@ -842,51 +805,262 @@ private static IAnalysis CreateWordAnalysisStack(LcmCache cache, Word word)
bundle.MsaRA = match;
}
}
- morphIdx++;
}
}
- return analysis;
+
+ return wordForm;
}
- ///
- /// add any alternative forms (in alternative writing systems) to the wordform.
- /// Overwrite any existing alternative form in a given alternative writing system.
- ///
- private static void AddAlternativeWssToWordform(IAnalysis analysis, Word word, ILgWritingSystem wsMainVernWs)
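+ /// <summary>
+ /// Look for an existing wordform or punctuation form (and, where possible, an analysis and gloss)
+ /// that matches the imported word. Returns true when a match is found; otherwise the missing
+ /// objects are created, the best available object is placed in analysis, and false is returned.
+ /// </summary>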
+ private static bool FindOrCreateWfiAnalysis(LcmCache cache, Word word,
+ int mainWritingSystem,
+ out IAnalysis analysis)
{
- ILgWritingSystemFactory wsFact = analysis.Cache.WritingSystemFactory;
- var wf = analysis.Wordform;
+ var wsFact = cache.WritingSystemFactory;
+
+ // First, collect all expected forms and glosses from the Word
+ var expectedForms = new Dictionary(); // wsHandle -> expected value
+ var expectedGlosses = new Dictionary(); // wsHandle -> expected gloss
+ IAnalysis candidateForm = null;
+ ITsString wordForm = null;
+ ITsString punctForm = null;
+
foreach (var wordItem in word.Items)
{
+ if (wordItem.Value == null)
+ continue;
+
+ var ws = GetWsEngine(wsFact, wordItem.lang);
+
switch (wordItem.type)
{
case "txt":
- var wsAlt = GetWsEngine(wsFact, wordItem.lang);
- if (wsAlt.Handle == wsMainVernWs.Handle)
+ wordForm = TsStringUtils.MakeString(wordItem.Value, ws.Handle);
+ expectedForms[ws.Handle] = wordItem.Value;
+
+ // Try to find a candidate wordform if we haven't found one yet
+ if (candidateForm == null)
+ {
+ candidateForm = cache.ServiceLocator
+ .GetInstance()
+ .GetMatchingWordform(ws.Handle, wordItem.Value);
+ }
+
+ break;
+
+ case "punct":
+ punctForm = TsStringUtils.MakeString(wordItem.Value, ws.Handle);
+ expectedForms[ws.Handle] = wordItem.Value;
+
+ if (candidateForm == null)
+ {
+ IPunctuationForm pf;
+ if (cache.ServiceLocator.GetInstance()
+ .TryGetObject(punctForm, out pf))
+ {
+ candidateForm = pf;
+ }
+ }
+
+ break;
+
+ case "gls":
+ // Only consider human-approved glosses
+ if (wordItem.analysisStatusSpecified &&
+ wordItem.analysisStatus != analysisStatusTypes.humanApproved)
continue;
- ITsString wffAlt = TsStringUtils.MakeString(wordItem.Value, wsAlt.Handle);
- if (wffAlt.Length > 0)
- wf.Form.set_String(wsAlt.Handle, wffAlt);
+
+ expectedGlosses[ws.Handle] = wordItem.Value;
break;
}
}
+
+ if (candidateForm == null || !MatchPrimaryFormAndAddMissingAlternatives(candidateForm, expectedForms, mainWritingSystem))
+ {
+ analysis = CreateMissingForm(cache, wordForm, punctForm, expectedForms);
+ return false;
+ }
+
+ var candidateWordform = candidateForm as IWfiWordform;
+ if (candidateWordform == null)
+ {
+ // candidate is a punctuation form, nothing else to match
+ analysis = candidateForm;
+ return true;
+ }
+ analysis = candidateWordform;
+ // If no glosses or morphemes are expected the wordform itself is the match
+ if (expectedGlosses.Count == 0
+ && (word.morphemes == null || word.morphemes.morphs.Length == 0))
+ {
+ analysis = GetMostSpecificAnalysisForWordForm(candidateWordform);
+ return true;
+ }
+
+ // Look for an analysis that has the correct morphemes and a matching gloss
+ foreach (var wfiAnalysis in candidateWordform.AnalysesOC)
+ {
+ var morphemeMatch = true;
+ // verify that the analysis has a Morph Bundle with the expected morphemes from the import
+ if (word.morphemes != null && wfiAnalysis.MorphBundlesOS.Count == word.morphemes?.morphs.Length)
+ {
+ analysis = GetMostSpecificAnalysisForWordForm(wfiAnalysis);
+ for(var i = 0; i < wfiAnalysis.MorphBundlesOS.Count; ++i)
+ {
+ var extantMorphForm = wfiAnalysis.MorphBundlesOS[i].Form;
+ var importMorphForm = word.morphemes.morphs[i].items.FirstOrDefault(item => item.type == "txt");
+ var importFormWs = GetWsEngine(wsFact, importMorphForm?.lang);
+ // compare the import item to the extant morph form
+ if (importMorphForm == null || extantMorphForm == null ||
+ TsStringUtils.IsNullOrEmpty(extantMorphForm.get_String(importFormWs.Handle)) ||
+ !extantMorphForm.get_String(importFormWs.Handle).Text.Normalize()
+ .Equals(importMorphForm.Value?.Normalize()))
+ {
+ morphemeMatch = false;
+ break;
+ }
+ }
+ }
+
+ if (morphemeMatch)
+ {
+ var matchingGloss = wfiAnalysis.MeaningsOC.FirstOrDefault(g => VerifyGlossesMatch(g, expectedGlosses));
+ if (matchingGloss != null)
+ {
+ analysis = matchingGloss;
+ return true;
+ }
+ }
+ }
+
+ // No matching analysis found with all expected gloss and morpheme data
+ analysis = AddEmptyAnalysisToWordform(cache, candidateWordform);
+ return false;
+ }
+
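+ /// <summary>
+ /// Return the most specific analysis level available: the gloss if there is one,
+ /// otherwise the WfiAnalysis, otherwise the wordform itself.
+ /// </summary>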
+ private static IAnalysis GetMostSpecificAnalysisForWordForm(IAnalysis candidateWordform)
+ {
+ var analysisTree = new AnalysisTree(candidateWordform);
+ if(analysisTree.Gloss != null)
+ return analysisTree.Gloss;
+ if(analysisTree.WfiAnalysis != null)
+ return analysisTree.WfiAnalysis;
+ return candidateWordform;
+ }
+
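+ /// <summary>
+ /// Create a new wordform (or punctuation form) for imported data that matched nothing in the
+ /// project, filling in any alternative writing system forms provided by the import.
+ /// </summary>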
+ private static IAnalysis CreateMissingForm(LcmCache cache, ITsString wordFormText,
+ ITsString punctFormText, Dictionary expectedForms)
+ {
+ if (wordFormText != null)
+ {
+ var wordForm = cache.ServiceLocator.GetInstance().Create(wordFormText);
+ foreach (var expected in expectedForms)
+ {
+ var wsHandle = expected.Key;
+ var expectedValue = expected.Value;
+ if (TsStringUtils.IsNullOrEmpty(wordForm.Form.get_String(wsHandle)))
+ {
+ wordForm.Form.set_String(wsHandle, TsStringUtils.MakeString(expectedValue, wsHandle));
+ }
+ }
+ return wordForm;
+ }
+ if (punctFormText != null)
+ {
+ var punctForm = cache.ServiceLocator.GetInstance().Create();
+ punctForm.Form = punctFormText;
+ return punctForm;
+ }
+
+ return null;
+ }
+
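+ /// <summary>
+ /// Create a new, empty WfiAnalysis and add it to the given wordform.
+ /// </summary>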
+ private static IAnalysis AddEmptyAnalysisToWordform(LcmCache cache, IWfiWordform owningWordform)
+ {
+ var analysis = cache.ServiceLocator.GetInstance().Create();
+ owningWordform.AnalysesOC.Add(analysis);
+ return analysis;
+ }
+
+ /// <summary>
+ /// Match the wordform or punctuation form on the first vernacular writing system.
+ /// Add any extra writing system data if the import data has it, but do not overwrite what is
+ /// already in the cache.
+ /// If there is no match on the primary vernacular form, nothing is set and false is returned.
+ /// </summary>
+ private static bool MatchPrimaryFormAndAddMissingAlternatives(IAnalysis wordForm,
+ Dictionary expectedForms, int mainWritingSystem)
+ {
+ IWfiWordform wf = null;
+ IPunctuationForm pf = null;
+
+ // Assign wf or pf based on the type of wordForm
+ switch (wordForm)
+ {
+ case IWfiWordform wordFormAsWf:
+ wf = wordFormAsWf;
+ break;
+ case IPunctuationForm wordFormAsPf:
+ pf = wordFormAsPf;
+ break;
+ }
+
+ // We could have ended up here if there was a match on an alternative writing system
+ if (!expectedForms.TryGetValue(mainWritingSystem, out _))
+ return false;
+
+ foreach (var kvp in expectedForms)
+ {
+ var wsHandle = kvp.Key;
+ var expectedValue = kvp.Value;
+ var storedForm = wf?.Form.get_String(wsHandle) ?? pf?.GetForm(wsHandle);
+ var newForm = TsStringUtils.MakeString(expectedValue, wsHandle);
+ if (TsStringUtils.IsNullOrEmpty(storedForm)) // Extra data found in the import
+ {
+ if (wf != null)
+ {
+ wf.Form.set_String(wsHandle, newForm);
+ }
+ else if (pf != null)
+ {
+ pf.Form = newForm;
+ }
+ }
+ }
+ return true;
+ }
+
+ // Helper method to verify that all expected glosses match the stored glosses
+ private static bool VerifyGlossesMatch(IWfiGloss wfiGloss,
+ Dictionary expectedGlosses)
+ {
+ foreach (var expectedGloss in expectedGlosses)
+ {
+ var wsHandle = expectedGloss.Key;
+ var expectedValue = expectedGloss.Value;
+
+ var storedGloss = wfiGloss.Form.get_String(wsHandle);
+ if (storedGloss == null || storedGloss.Text != expectedValue)
+ return false; // Mismatch found
+ }
+
+ return true;
}
///
- ///
///
- ///
- /// the new analysis Gloss. If multiple glosses, returns the last one created.
- private static void UpgradeToWordGloss(Word word, ref IAnalysis analysis)
+ /// The word Gloss. If multiple glosses, returns the last one created.
+ private static void UpgradeToWordGloss(Word word, ref IAnalysis wordForm)
{
- LcmCache cache = analysis.Cache;
+ var cache = wordForm.Cache;
var wsFact = cache.WritingSystemFactory;
if (s_importOptions.AnalysesLevel == ImportAnalysesLevel.WordGloss)
{
// test for adding multiple glosses in the same language. If so, create separate analyses with separate glosses.
- bool fHasMultipleGlossesInSameLanguage = false;
+ var fHasMultipleGlossesInSameLanguage = false;
var dictMapLangToGloss = new Dictionary();
- foreach (var wordGlossItem in word.Items.Select(i => i).Where(i => i.type == "gls"))
+ var processedGlossLangs = new HashSet();
+ foreach (var wordGlossItem in word.Items.Select(i => i)
+ .Where(i => i.type == "gls"))
{
string gloss;
if (!dictMapLangToGloss.TryGetValue(wordGlossItem.lang, out gloss))
@@ -894,61 +1068,64 @@ private static void UpgradeToWordGloss(Word word, ref IAnalysis analysis)
dictMapLangToGloss.Add(wordGlossItem.lang, wordGlossItem.Value);
continue;
}
- if (wordGlossItem.Value == gloss) continue;
+
+ if (wordGlossItem.Value.Normalize().Equals(gloss?.Normalize())) continue;
fHasMultipleGlossesInSameLanguage = true;
break;
}
- AnalysisTree analysisTree = null;
- foreach (var wordGlossItem in word.Items.Select(i => i).Where(i => i.type == "gls"))
+ AnalysisTree analysisTree = new AnalysisTree(wordForm);
+ foreach (var wordGlossItem in word.Items.Select(i => i)
+ .Where(i => i.type == "gls"))
{
- if (wordGlossItem == null) continue;
if (wordGlossItem.analysisStatusSpecified &&
- wordGlossItem.analysisStatus != analysisStatusTypes.humanApproved) continue;
+ wordGlossItem.analysisStatus != analysisStatusTypes.humanApproved)
+ continue;
// first make sure that an existing gloss does not already exist. (i.e. don't add duplicate glosses)
- int wsNewGloss = GetWsEngine(wsFact, wordGlossItem.lang).Handle;
- ITsString newGlossTss = TsStringUtils.MakeString(wordGlossItem.Value,
- wsNewGloss);
- var wfiWord = analysis.Wordform;
- bool hasGlosses = wfiWord.AnalysesOC.Any(wfia => wfia.MeaningsOC.Any());
- IWfiGloss matchingGloss = null;
- if (hasGlosses)
- {
- foreach (var wfa in wfiWord.AnalysesOC)
- {
- matchingGloss = wfa.MeaningsOC.FirstOrDefault(wfg => wfg.Form.get_String(wsNewGloss).Equals(newGlossTss));
- if (matchingGloss != null)
- break;
- }
- }
+ var wsNewGloss = GetWsEngine(wsFact, wordGlossItem.lang).Handle;
+ var wfiWord = wordForm.Wordform;
- if (matchingGloss != null)
- analysis = matchingGloss;
- else
+ if (fHasMultipleGlossesInSameLanguage && processedGlossLangs.Contains(wordGlossItem.lang))
+ // create a new WfiAnalysis to store a new gloss
+ analysisTree = WordAnalysisOrGlossServices.CreateNewAnalysisTreeGloss(wfiWord);
+ // else, reuse the same analysisTree for setting a gloss alternative
+ if (analysisTree.Gloss == null)
{
- // TODO: merge with analysis having same morpheme breakdown (or at least the same stem)
-
- if (analysisTree == null || dictMapLangToGloss.Count == 1 || fHasMultipleGlossesInSameLanguage)
+ var wfiGloss = cache.ServiceLocator.GetInstance().Create();
+ var analysis = analysisTree.WfiAnalysis;
+ if (analysis == null)
{
- // create a new WfiAnalysis to store a new gloss
- analysisTree = WordAnalysisOrGlossServices.CreateNewAnalysisTreeGloss(wfiWord);
+ analysis = (IWfiAnalysis)AddEmptyAnalysisToWordform(cache, wfiWord);
}
- // else, reuse the same analysisTree for setting a gloss alternative
-
- analysisTree.Gloss.Form.set_String(wsNewGloss, wordGlossItem.Value);
+ analysis.MeaningsOC.Add(wfiGloss);
+ analysisTree = new AnalysisTree(wfiGloss);
+ }
+ analysisTree.Gloss.Form.set_String(wsNewGloss, wordGlossItem.Value);
+ if (word.morphemes?.analysisStatus != analysisStatusTypes.guess)
// Make sure this analysis is marked as user-approved (green check mark)
- cache.LangProject.DefaultUserAgent.SetEvaluation(analysisTree.WfiAnalysis, Opinions.approves);
+ cache.LangProject.DefaultUserAgent.SetEvaluation(
+ analysisTree.WfiAnalysis, Opinions.approves);
+ wordForm = analysisTree.Gloss;
+ // If no morphemes are defined for the word, define a single one.
+ if(word.morphemes == null || word.morphemes.morphs.Length == 0)
+ {
// Create a morpheme form that matches the wordform.
- var morphemeBundle = cache.ServiceLocator.GetInstance().Create();
+ var morphemeBundle = cache.ServiceLocator
+ .GetInstance().Create();
var wordItem = word.Items.Select(i => i).First(i => i.type == "txt");
- int wsWord = GetWsEngine(wsFact, wordItem.lang).Handle;
+ var wsWord = GetWsEngine(wsFact, wordItem.lang).Handle;
analysisTree.WfiAnalysis.MorphBundlesOS.Add(morphemeBundle);
morphemeBundle.Form.set_String(wsWord, wordItem.Value);
- analysis = analysisTree.Gloss;
}
+
+ processedGlossLangs.Add(wordGlossItem.lang);
}
}
+
+ if (wordForm != null && word.morphemes?.analysisStatus == analysisStatusTypes.guess)
+ // Ignore gloss if morphological analysis was only a guess.
+ wordForm = wordForm.Wordform;
}
///
@@ -1020,4 +1197,4 @@ private static void SetTextMetaAndMergeMedia(LcmCache cache, Interlineartext int
}
}
}
-}
+}
\ No newline at end of file
diff --git a/Src/LexText/Interlinear/ChooseAnalysisHandler.cs b/Src/LexText/Interlinear/ChooseAnalysisHandler.cs
index c83feb2268..bdee5229be 100644
--- a/Src/LexText/Interlinear/ChooseAnalysisHandler.cs
+++ b/Src/LexText/Interlinear/ChooseAnalysisHandler.cs
@@ -3,16 +3,17 @@
// (http://www.gnu.org/licenses/lgpl-2.1.html)
using System;
-using System.Windows.Forms;
using System.Drawing;
-using SIL.LCModel;
+using System.Linq;
+using System.Windows.Forms;
using SIL.FieldWorks.Common.ViewsInterfaces;
-using SIL.LCModel.DomainServices;
-using SIL.FieldWorks.FdoUi;
-using SIL.LCModel.Utils;
using SIL.FieldWorks.Common.Widgets;
-using SIL.LCModel.Core.Text;
+using SIL.FieldWorks.FdoUi;
+using SIL.LCModel;
using SIL.LCModel.Core.KernelInterfaces;
+using SIL.LCModel.Core.Text;
+using SIL.LCModel.DomainServices;
+using SIL.LCModel.Utils;
namespace SIL.FieldWorks.IText
@@ -311,9 +312,7 @@ void AddAnalysisItems(IWfiAnalysis wa)
{
AddItem(wa,
MakeAnalysisStringRep(wa, m_cache, StyleSheet != null, (m_owner as SandboxBase).RawWordformWs), true);
- var guess_services = new AnalysisGuessServices(m_cache);
- var sorted_glosses = guess_services.GetSortedGlossGuesses(wa, m_occurrence);
- foreach (var gloss in sorted_glosses)
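+ // Add an item for each gloss belonging to this analysis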
+ foreach (var gloss in wa.MeaningsOC.ToList())
{
AddItem(gloss, MakeGlossStringRep(gloss, m_cache, StyleSheet != null), true);
}
diff --git a/Src/LexText/Interlinear/ITextDllTests/BIRDFormatImportTests.cs b/Src/LexText/Interlinear/ITextDllTests/BIRDFormatImportTests.cs
index b361a988fb..dc0e205ba1 100644
--- a/Src/LexText/Interlinear/ITextDllTests/BIRDFormatImportTests.cs
+++ b/Src/LexText/Interlinear/ITextDllTests/BIRDFormatImportTests.cs
@@ -134,21 +134,6 @@ public void ValidateScrBookAttribute()
Assert.DoesNotThrow(() => ReadXmlForValidation(xmlReader));
}
- [Test]
- [Ignore("EricP: Add valid ScrBook values to the schema? (e.g. GEN, MAT)...or reference an external schema for those?")]
- public void InvalidScrBookAttributeValue()
- {
- const string xml = "" +
- "" +
- "";
-
- XmlReader xmlReader = GetXmlReaderForTest(xml);
- var ex = Assert.Throws(() => ReadXmlForValidation(xmlReader));
- // TODO-Linux: The message on Mono doesn't state the failing attribute
- if (!Platform.IsMono)
- Assert.That(ex.Message, Is.EqualTo("The 'scrSectionType' attribute is invalid - The value 'invalid' is invalid according to its datatype 'scrSectionTypes' - The Enumeration constraint failed."));
- }
-
[Test]
public void ValidateScrSectionTypeAttributes()
{
diff --git a/Src/LexText/Interlinear/ITextDllTests/ImportInterlinearAnalysesTests.cs b/Src/LexText/Interlinear/ITextDllTests/ImportInterlinearAnalysesTests.cs
index ea018e48c6..cb9fa59b27 100644
--- a/Src/LexText/Interlinear/ITextDllTests/ImportInterlinearAnalysesTests.cs
+++ b/Src/LexText/Interlinear/ITextDllTests/ImportInterlinearAnalysesTests.cs
@@ -240,6 +240,342 @@ public void SkipNewGuessedWordGloss()
}
}
+ [Test]
+ public void ImportMorphemes_WhenAllMorphemesMatch_ExistingWfiAnalysisIsUsed()
+ {
+ // 1. Build pre-existing data with a known wordform and morphemes ("cat", "-s")
+ var sl = Cache.ServiceLocator;
+ LCModel.IText text;
+ IStTxtPara para = null;
+ IWfiWordform extantWordform = null;
+ var segGuid = Guid.Empty;
+ NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () =>
+ {
+ text = sl.GetInstance().Create(Cache,
+ new Guid("BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB"));
+ var sttext = sl.GetInstance().Create();
+ text.ContentsOA = sttext;
+ para = sl.GetInstance().Create();
+ sttext.ParagraphsOS.Add(para);
+
+ var segment = sl.GetInstance().Create();
+ para.SegmentsOS.Add(segment);
+ segGuid = segment.Guid;
+
+ // Use the helper method to create a wordform with an analysis and two morph bundles and a gloss
+ extantWordform = BuildWordformWithMorphemes();
+ // Add the gloss analysis to the segment
+ segment.AnalysesRS.Add(extantWordform.AnalysesOC.First().MeaningsOC.First());
+ });
+
+ // Get initial object counts for verification
+ var initialWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialGlossCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ // 2. Create XML for import where the morphemes match the existing ones
+ var xml = "" +
+ "" +
+ "" +
+ "- cats
" +
+ "- gato
" +
+ "" +
+ "- cat
" +
+ "- -s
" +
+ "" +
+ "" +
+ "";
+
+ // 3. Perform the import
+ var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
+ var options = CreateImportInterlinearOptions(xml);
+ LCModel.IText importedText = null;
+ li.ImportInterlinear(options, ref importedText);
+
+ // 4. Verify that no new objects were created
+ var finalWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalGlossCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ Assert.That(finalWordformCount, Is.EqualTo(initialWordformCount),
+ "A new Wordform should not have been created.");
+ Assert.That(finalAnalysisCount, Is.EqualTo(initialAnalysisCount),
+ "A new Analysis should not have been created.");
+ Assert.That(finalGlossCount, Is.EqualTo(initialGlossCount),
+ "A new Gloss should not have been created.");
+ Assert.That(finalMorphBundleCount, Is.EqualTo(initialMorphBundleCount),
+ "New MorphBundles should not have been created.");
+
+ // Verify the imported analysis is the same object
+ var importedPara = importedText.ContentsOA.ParagraphsOS[0] as IStTxtPara;
+ var importedAnalysis = importedPara.SegmentsOS[0].AnalysesRS[0];
+ Assert.That(importedAnalysis, Is.SameAs(extantWordform.AnalysesOC.First().MeaningsOC.First()),
+ "The imported analysis should be the same as the original.");
+ }
+
+ [Test]
+ public void ImportNewText_PhraseWsUsedForMatching()
+ {
+ // 1. Build pre-existing data with a known wordform and morphemes ("cat", "-s")
+ var sl = Cache.ServiceLocator;
+ LCModel.IText text;
+ IStTxtPara para = null;
+ IWfiWordform extantWordform = null;
+ var segGuid = Guid.Empty;
+ NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () =>
+ {
+ Cache.LangProject.AddToCurrentVernacularWritingSystems(new CoreWritingSystemDefinition("pt"));
+ text = sl.GetInstance().Create(Cache,
+ new Guid("CCCCCCCC-DDDD-CCCC-CCCC-CCCCCCCCCCCC"));
+ var sttext = sl.GetInstance().Create();
+ text.ContentsOA = sttext;
+ para = sl.GetInstance().Create();
+ sttext.ParagraphsOS.Add(para);
+
+ var segment = sl.GetInstance().Create();
+ para.SegmentsOS.Add(segment);
+ segGuid = segment.Guid;
+
+ extantWordform = BuildWordformWithMorphemes("pt");
+ segment.AnalysesRS.Add(extantWordform);
+ });
+
+ // Get initial object counts for verification
+ var initialWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialGlossCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ // 2. Create XML for import where the phrase supplies the writing system and the morphemes match the existing ones ("cat", "-s")
+ var xml = "" +
+ "" +
+ "- cats
" +
+ "" +
+ "" +
+ "- cats
" +
+ "- gato
" +
+ "" +
+ "- cat
" +
+ "- -s
" +
+ "" +
+ "" +
+ "";
+
+ // 3. Perform the import
+ var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
+ var options = CreateImportInterlinearOptions(xml);
+ LCModel.IText importedText = null;
+ li.ImportInterlinear(options, ref importedText);
+
+ // 4. Verify that no new objects were created
+ var finalWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalGlossCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ Assert.That(finalWordformCount, Is.EqualTo(initialWordformCount),
+ "A new Wordform should not have been created.");
+ Assert.That(finalAnalysisCount, Is.EqualTo(initialAnalysisCount),
+ "A new Analysis should not have been created.");
+ Assert.That(finalGlossCount, Is.EqualTo(initialGlossCount),
+ "A new Gloss should not have been created.");
+ Assert.That(finalMorphBundleCount, Is.EqualTo(initialMorphBundleCount),
+ "New MorphBundles should not have been created.");
+
+ // Verify the imported analysis is the same object
+ var importedPara = importedText.ContentsOA.ParagraphsOS[0] as IStTxtPara;
+ var importedAnalysis = importedPara.SegmentsOS[0].AnalysesRS[0];
+ Assert.That(importedAnalysis, Is.SameAs(extantWordform.AnalysesOC.First().MeaningsOC.First()),
+ "The imported analysis should be the same as the original.");
+ }
+
+ [Test]
+ public void ImportMorphemes_WhenMorphemesDoNotMatch_WordFormGetsNewWfiAnalysis()
+ {
+ // 1. Build pre-existing data with a known wordform and morphemes ("cat", "-s")
+ var sl = Cache.ServiceLocator;
+ LCModel.IText text;
+ IStTxtPara para = null;
+ IWfiWordform extantWordform = null;
+ var segGuid = Guid.Empty;
+ NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () =>
+ {
+ text = sl.GetInstance().Create(Cache,
+ new Guid("CCCCCCCC-DDDD-CCCC-CCCC-CCCCCCCCCCCC"));
+ var sttext = sl.GetInstance().Create();
+ text.ContentsOA = sttext;
+ para = sl.GetInstance().Create();
+ sttext.ParagraphsOS.Add(para);
+
+ var segment = sl.GetInstance().Create();
+ para.SegmentsOS.Add(segment);
+ segGuid = segment.Guid;
+
+ extantWordform = BuildWordformWithMorphemes();
+ segment.AnalysesRS.Add(extantWordform);
+ });
+
+ // Get initial object counts for verification
+ var initialWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ // 2. Create XML for import with a different second morpheme ("cat", "-ing")
+ var xml = "" +
+ "" +
+ "" +
+ "- cats
" +
+ "- gato
" +
+ "" +
+ "- cat
" +
+ "- -ing
" +
+ "" +
+ "" +
+ "";
+
+ // 3. Perform the import
+ var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
+ var options = CreateImportInterlinearOptions(xml);
+ LCModel.IText importedText = null;
+ li.ImportInterlinear(options, ref importedText);
+
+ // 4. Verify that new objects were created due to the mismatch
+ var finalWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ Assert.That(finalWordformCount, Is.EqualTo(initialWordformCount),
+ "Wordform count should not change.");
+ Assert.That(finalAnalysisCount, Is.EqualTo(initialAnalysisCount + 1),
+ "A new Analysis should have been created.");
+ Assert.That(finalMorphBundleCount, Is.EqualTo(initialMorphBundleCount + 2),
+ "Two new MorphBundles should have been created.");
+
+ // Verify the imported analysis and its contents
+ var importedPara = importedText.ContentsOA.ParagraphsOS[0] as IStTxtPara;
+ if(!(importedPara.SegmentsOS[0].AnalysesRS[0] is IWfiGloss importedAnalysis))
+ Assert.Fail("Incorrect analysis type imported");
+ else
+ {
+ Assert.That(importedAnalysis.Analysis.MorphBundlesOS.Count, Is.EqualTo(2),
+ "The new analysis should have two morph bundles.");
+ Assert.That(
+ importedAnalysis.Analysis.MorphBundlesOS[0].Form.get_String(Cache.DefaultVernWs).Text,
+ Is.EqualTo("cat"));
+ Assert.That(
+ importedAnalysis.Analysis.MorphBundlesOS[1].Form.get_String(Cache.DefaultVernWs).Text,
+ Is.EqualTo("-ing"));
+ }
+ }
+
+ [Test]
+ public void ImportMorphemes_WhenMorphemesMatchButOutOfOrder_NewObjectsAreCreated()
+ {
+ // 1. Build pre-existing data with a known wordform and morphemes ("cat", "-s")
+ var sl = Cache.ServiceLocator;
+ LCModel.IText text;
+ IStTxtPara para = null;
+ IWfiWordform extantWordform = null;
+ var segGuid = Guid.Empty;
+ NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () =>
+ {
+ text = sl.GetInstance().Create(Cache,
+ new Guid("DDDDDDDD-DDDD-DDDD-DDDD-DDDDDDDDDDDD"));
+ var sttext = sl.GetInstance().Create();
+ text.ContentsOA = sttext;
+ para = sl.GetInstance().Create();
+ sttext.ParagraphsOS.Add(para);
+
+ var segment = sl.GetInstance().Create();
+ para.SegmentsOS.Add(segment);
+ segGuid = segment.Guid;
+
+ extantWordform = BuildWordformWithMorphemes();
+ segment.AnalysesRS.Add(extantWordform);
+ });
+
+ // Get initial object counts for verification
+ var initialWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var initialMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ // 2. Create XML for import where the morphemes are the same but the order is reversed
+ var xml = "" +
+ "" +
+ "" +
+ "- cats
" +
+ "" +
+ "- -s
" +
+ "- cat
" +
+ "" +
+ "" +
+ "";
+
+ // 3. Perform the import
+ var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
+ var options = CreateImportInterlinearOptions(xml);
+ LCModel.IText importedText = null;
+ li.ImportInterlinear(options, ref importedText);
+
+ // 4. Verify that new objects were created due to the order mismatch
+ var finalWordformCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalAnalysisCount =
+ Cache.ServiceLocator.GetInstance().Count;
+ var finalMorphBundleCount =
+ Cache.ServiceLocator.GetInstance().Count;
+
+ Assert.That(finalWordformCount, Is.EqualTo(initialWordformCount),
+ "Wordform count should not change.");
+ Assert.That(finalAnalysisCount, Is.EqualTo(initialAnalysisCount + 1),
+ "A new Analysis should have been created.");
+ Assert.That(finalMorphBundleCount, Is.EqualTo(initialMorphBundleCount + 2),
+ "Two new MorphBundles should have been created.");
+
+ // Verify the imported analysis and its contents
+ var importedPara = importedText.ContentsOA.ParagraphsOS[0] as IStTxtPara;
+ if(!(importedPara.SegmentsOS[0].AnalysesRS[0] is IWfiAnalysis importedAnalysis))
+ Assert.Fail("Incorrect analysis type imported");
+ else
+ {
+ Assert.That(importedAnalysis.MorphBundlesOS.Count, Is.EqualTo(2),
+ "The new analysis should have two morph bundles.");
+ Assert.That(
+ importedAnalysis.MorphBundlesOS[0].Form.get_String(Cache.DefaultVernWs).Text,
+ Is.EqualTo("-s"));
+ Assert.That(
+ importedAnalysis.MorphBundlesOS[1].Form.get_String(Cache.DefaultVernWs).Text,
+ Is.EqualTo("cat"));
+ }
+ }
+
[Test]
public void ImportNewUserConfirmedWordGlossToExistingWord()
{
@@ -249,7 +585,7 @@ public void ImportNewUserConfirmedWordGlossToExistingWord()
LCModel.IText text;
IStTxtPara para = null;
- IWfiWordform word = null;
+ IWfiWordform extantWordForm = null;
ITsString paraContents = null;
Guid segGuid = new Guid();
NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () =>
@@ -266,8 +602,8 @@ public void ImportNewUserConfirmedWordGlossToExistingWord()
ITsString wform = TsStringUtils.MakeString("supercalifragilisticexpialidocious",
wsf.get_Engine("en").Handle);
segGuid = segment.Guid;
- word = sl.GetInstance().Create(wform);
- segment.AnalysesRS.Add(word);
+ extantWordForm = sl.GetInstance().Create(wform);
+ segment.AnalysesRS.Add(extantWordForm);
});
// import an analysis with word gloss
@@ -294,7 +630,7 @@ public void ImportNewUserConfirmedWordGlossToExistingWord()
// make sure we've added the expected word gloss
Assert.That(importedPara.SegmentsOS[0].AnalysesRS.Count, Is.EqualTo(1));
var importedAnalysis = importedPara.SegmentsOS[0].AnalysesRS[0];
- var importedWord = importedAnalysis.Wordform;
+ var importedWordForm = importedAnalysis.Wordform;
var at = new AnalysisTree(importedAnalysis);
Assert.That(at.Gloss, Is.Not.Null, "IAnalysis should be WfiGloss");
var importedGloss = at.Gloss;
@@ -311,9 +647,8 @@ public void ImportNewUserConfirmedWordGlossToExistingWord()
Assert.That(imported.ContentsOA.ParagraphsOS.Count, Is.EqualTo(1));
Assert.AreEqual(paraContents.Text, importedPara.Contents.Text, "Imported Para contents differ from original");
Assert.IsTrue(paraContents.Equals(importedPara.Contents), "Ws mismatch between imported and original paragraph");
- Assert.That(importedWord.Form.get_String(wsf.get_Engine("en").Handle).Text,
+ Assert.That(importedWordForm.Form.get_String(wsf.get_Engine("en").Handle).Text,
Is.EqualTo("supercalifragilisticexpialidocious"));
- Assert.That(importedWord.Guid, Is.EqualTo(word.Guid));
// assert that nothing else was created
Assert.That(Cache.ServiceLocator.GetInstance().Count, Is.EqualTo(1));
Assert.That(Cache.ServiceLocator.GetInstance().Count, Is.EqualTo(1));
@@ -321,6 +656,66 @@ public void ImportNewUserConfirmedWordGlossToExistingWord()
}
}
+ /// <summary>
+ /// A helper method that builds a valid LCM object graph for a wordform with an analysis
+ /// and morphemes, ensuring all objects have a proper owner. This method should be called
+ /// from within a NonUndoableUnitOfWorkHelper.Do block.
+ /// </summary>
+ private IWfiWordform BuildWordformWithMorphemes(string vernacularWs = "fr")
+ {
+ var sl = Cache.ServiceLocator;
+ var wsf = Cache.WritingSystemFactory;
+
+ // Create the IWfiWordform object
+ var wordform = sl.GetInstance().Create();
+ wordform.Form.set_String(wsf.get_Engine(vernacularWs).Handle, "cats");
+
+ // Establish the ownership chain for the wordform's internal objects first.
+ var analysis = sl.GetInstance().Create();
+ var gloss = sl.GetInstance().Create();
+ wordform.AnalysesOC.Add(analysis);
+ analysis.MeaningsOC.Add(gloss);
+ gloss.Form.set_String(wsf.get_Engine("en").Handle, "gato");
+
+ var stemMorphBundle = sl.GetInstance().Create();
+ analysis.MorphBundlesOS.Add(stemMorphBundle);
+
+ var affixMorphBundle = sl.GetInstance().Create();
+ analysis.MorphBundlesOS.Add(affixMorphBundle);
+
+ // Create the owning LexEntries for the allomorphs so the MorphRA properties reference owned objects.
+ // For this unit test, we'll create separate LexEntries to own the stem and the affix.
+ var stemLexEntry = sl.GetInstance().Create();
+ var affixLexEntry = sl.GetInstance().Create();
+
+ // Create the allomorphs and establish their ownership via the LexEntries.
+ // The LexEntry.LexemeFormOA property is an Owning Atom.
+ var stemAllomorph = sl.GetInstance().Create();
+ stemLexEntry.LexemeFormOA = stemAllomorph;
+
+ var affixAllomorph = sl.GetInstance().Create();
+ affixLexEntry.LexemeFormOA = affixAllomorph;
+
+ // Now that the allomorphs are valid and owned, we can assign them to the MorphRA properties.
+ stemMorphBundle.MorphRA = stemAllomorph;
+ affixMorphBundle.MorphRA = affixAllomorph;
+
+ // Now, set the string properties for the objects.
+ wordform.Form.set_String(wsf.get_Engine(vernacularWs).Handle, "cats");
+ stemMorphBundle.Form.set_String(wsf.get_Engine(vernacularWs).Handle, "cat");
+ affixMorphBundle.Form.set_String(wsf.get_Engine(vernacularWs).Handle, "-s");
+
+ // Create a LexSense on each LexEntry and link it from the corresponding morph bundle.
+ var lexSenseForStem = sl.GetInstance().Create();
+ stemLexEntry.SensesOS.Add(lexSenseForStem);
+ stemMorphBundle.SenseRA = lexSenseForStem;
+
+ var lexSenseForAffix = sl.GetInstance().Create();
+ affixLexEntry.SensesOS.Add(lexSenseForAffix);
+ affixMorphBundle.SenseRA = lexSenseForAffix;
+
+ return wordform;
+ }
[Test]
public void ImportNewUserConfirmedWordGlossToExistingWordWithGuid()
{
@@ -562,7 +957,7 @@ public void SkipConfirmedWordGlossToSameWordGloss()
}
[Test]
- public void ImportNewUserConfirmedWordGlossSeparatedFromToExistingWfiAnalysis()
+ public void ImportNewUserConfirmedWordGlossSeparatedFromExistingWfiAnalysis()
{
// build pre-existing data
var sl = Cache.ServiceLocator;
@@ -570,7 +965,7 @@ public void ImportNewUserConfirmedWordGlossSeparatedFromToExistingWfiAnalysis()
LCModel.IText text;
- IWfiWordform word = null;
+ IWfiWordform extantWordForm = null;
ITsString paraContents = null;
var segGuid = new Guid();
NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () =>
@@ -587,10 +982,10 @@ public void ImportNewUserConfirmedWordGlossSeparatedFromToExistingWfiAnalysis()
ITsString wform = TsStringUtils.MakeString("supercalifragilisticexpialidocious",
wsf.get_Engine("en").Handle);
segGuid = segment.Guid;
- word = sl.GetInstance().Create(wform);
- var newWfiAnalysis = sl.GetInstance().Create();
- word.AnalysesOC.Add(newWfiAnalysis);
- segment.AnalysesRS.Add(word);
+ extantWordForm = sl.GetInstance().Create(wform);
+ var extantAnalysis = sl.GetInstance().Create();
+ extantWordForm.AnalysesOC.Add(extantAnalysis);
+ segment.AnalysesRS.Add(extantWordForm);
});
// import an analysis with word gloss
@@ -620,25 +1015,22 @@ public void ImportNewUserConfirmedWordGlossSeparatedFromToExistingWfiAnalysis()
// make sure imported word gloss is correct
Assert.That(importedPara.SegmentsOS[0].AnalysesRS.Count, Is.EqualTo(1));
var importedAnalysis = importedPara.SegmentsOS[0].AnalysesRS[0];
- var skippedWord = importedAnalysis.Wordform;
+ var importedWordForm = importedAnalysis.Wordform;
var at = new AnalysisTree(importedAnalysis);
Assert.That(at.Gloss, Is.Not.Null, "IAnalysis should be WfiGloss");
var newGloss = at.Gloss;
Assert.That(newGloss.Form.get_String(wsf.get_Engine("pt").Handle).Text, Is.EqualTo("absurdo"));
- Assert.That(skippedWord.Guid, Is.EqualTo(word.Guid));
// make sure nothing else has changed:
Assert.That(Cache.LanguageProject.Texts.Count, Is.EqualTo(1));
Assert.That(imported.ContentsOA.ParagraphsOS.Count, Is.EqualTo(1));
Assert.AreEqual(paraContents.Text, importedPara.Contents.Text, "Imported Para contents differ from original");
Assert.IsTrue(paraContents.Equals(importedPara.Contents), "Ws mismatch between imported and original paragraph");
- Assert.That(skippedWord.Form.get_String(wsf.get_Engine("en").Handle).Text,
+ Assert.That(importedWordForm.Form.get_String(wsf.get_Engine("en").Handle).Text,
Is.EqualTo("supercalifragilisticexpialidocious"));
- Assert.That(skippedWord.Guid, Is.EqualTo(word.Guid));
-
- // make sure nothing else changed
- Assert.That(Cache.ServiceLocator.GetInstance().Count, Is.EqualTo(1));
+ // The wordform should be reused, but with a new analysis
Assert.That(Cache.ServiceLocator.GetInstance().Count, Is.EqualTo(1));
+ Assert.That(Cache.ServiceLocator.GetInstance().Count, Is.EqualTo(1));
}
}
@@ -742,9 +1134,10 @@ public void DeserializeWordsFragDocument()
";
var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
+ var wsQaa = Cache.WritingSystemFactory.GetWsFromStr("qaa-x-kal");
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsQaa));
}
[Test]
@@ -765,7 +1158,7 @@ public void WordsFragDoc_OneWordAndOneGloss()
var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
var wordsRepo = Cache.ServiceLocator.GetInstance();
var wff1 = wordsRepo.GetMatchingWordform(wsKal.Handle, "glossedonce");
@@ -799,12 +1192,12 @@ public void WordsFragDoc_OneWordAndOneGloss_AvoidDuplication()
// First import
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
// Second Import
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
var wordsRepo = Cache.ServiceLocator.GetInstance();
var wff1 = wordsRepo.GetMatchingWordform(wsKal.Handle, "glossedonce");
@@ -837,7 +1230,7 @@ public void WordsFragDoc_OneWordAndMultiGloss()
var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
var wordsRepo = Cache.ServiceLocator.GetInstance();
var wff1 = wordsRepo.GetMatchingWordform(wsKal.Handle, "glossedtwice");
@@ -873,12 +1266,12 @@ public void WordsFragDoc_OneWordAndMultiGloss_AvoidDuplication()
// First import
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
// Second import
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
var wordsRepo = Cache.ServiceLocator.GetInstance();
var wff1 = wordsRepo.GetMatchingWordform(wsKal.Handle, "glossedtwice");
@@ -912,7 +1305,7 @@ public void WordsFragDoc_OneWordPhraseAndOneGloss()
var li = new BIRDFormatImportTests.LLIMergeExtension(Cache, null, null);
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
var wordsRepo = Cache.ServiceLocator.GetInstance();
var wff1 = wordsRepo.GetMatchingWordform(wsKal.Handle, "support a phrase");
@@ -945,11 +1338,11 @@ public void WordsFragDoc_OneWordPhraseAndOneGloss_AvoidDuplicates()
// First Import
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
// Second Import
Assert.DoesNotThrow(() => li.ImportWordsFrag(
() => new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())),
- LinguaLinksImport.ImportAnalysesLevel.WordGloss));
+ LinguaLinksImport.ImportAnalysesLevel.WordGloss, wsKal.Handle));
var wordsRepo = Cache.ServiceLocator.GetInstance();
var wff1 = wordsRepo.GetMatchingWordform(wsKal.Handle, "support a phrase");
diff --git a/Src/LexText/Interlinear/LinguaLinksImport.cs b/Src/LexText/Interlinear/LinguaLinksImport.cs
index 13293ef8d9..57e3e76c2a 100644
--- a/Src/LexText/Interlinear/LinguaLinksImport.cs
+++ b/Src/LexText/Interlinear/LinguaLinksImport.cs
@@ -205,25 +205,25 @@ public enum ImportAnalysesLevel
WordGloss
}
- public void ImportWordsFrag(Func createWordsFragDocStream, ImportAnalysesLevel analysesLevel)
+ public void ImportWordsFrag(Func createWordsFragDocStream, ImportAnalysesLevel analysesLevel, int mainWs)
{
using (var stream = createWordsFragDocStream.Invoke())
{
var serializer = new XmlSerializer(typeof(WordsFragDocument));
var wordsFragDoc = (WordsFragDocument)serializer.Deserialize(stream);
NormalizeWords(wordsFragDoc.Words);
- ImportWordsFrag(wordsFragDoc.Words, analysesLevel);
+ ImportWordsFrag(wordsFragDoc.Words, analysesLevel, mainWs);
}
}
- internal void ImportWordsFrag(Word[] words, ImportAnalysesLevel analysesLevel)
+ internal void ImportWordsFrag(Word[] words, ImportAnalysesLevel analysesLevel, int mainWs)
{
s_importOptions = new ImportInterlinearOptions {AnalysesLevel = analysesLevel};
NonUndoableUnitOfWorkHelper.Do(m_cache.ActionHandlerAccessor, () =>
{
foreach (var word in words)
{
- CreateWordAnalysisStack(m_cache, word);
+ CreateWordformWithWfiAnalysis(m_cache, word, mainWs);
}
});
}
diff --git a/Src/LexText/Interlinear/WordsSfmImportWizard.cs b/Src/LexText/Interlinear/WordsSfmImportWizard.cs
index 00cf677e46..82335a9ffa 100644
--- a/Src/LexText/Interlinear/WordsSfmImportWizard.cs
+++ b/Src/LexText/Interlinear/WordsSfmImportWizard.cs
@@ -59,7 +59,7 @@ protected override void DoStage2Conversion(byte[] stage1, IThreadedProgress dlg)
// Until we have a better idea, assume we're half done with the import when we've produced the intermediate.
// TODO: we could do progress based on number of words to import.
dlg.Position += 50;
- stage2Converter.ImportWordsFrag(() => new MemoryStream(stage1), LinguaLinksImport.ImportAnalysesLevel.WordGloss);
+ stage2Converter.ImportWordsFrag(() => new MemoryStream(stage1), LinguaLinksImport.ImportAnalysesLevel.WordGloss, m_cache.DefaultVernWs);
}
diff --git a/Src/MasterVersionInfo.txt b/Src/MasterVersionInfo.txt
index e0aa266089..cc39f3fab5 100644
--- a/Src/MasterVersionInfo.txt
+++ b/Src/MasterVersionInfo.txt
@@ -1,4 +1,4 @@
FWMAJOR=9
FWMINOR=2
-FWREVISION=10
+FWREVISION=11
FWBETAVERSION=
diff --git a/Src/xWorks/CssGenerator.cs b/Src/xWorks/CssGenerator.cs
index 05fcccfb07..5c807fd266 100644
--- a/Src/xWorks/CssGenerator.cs
+++ b/Src/xWorks/CssGenerator.cs
@@ -689,6 +689,7 @@ private static StyleRule AdjustRuleIfParagraphNumberScheme(StyleRule rule, Confi
private static List GenerateCssFromWsOptions(ConfigurableDictionaryNode configNode, DictionaryNodeWritingSystemOptions wsOptions,
string baseSelection, ReadOnlyPropertyTable propertyTable)
{
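+ // Accumulate one rule per enabled writing system option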
+ var rules = new List();
var cache = propertyTable.GetValue("cache");
foreach(var ws in wsOptions.Options.Where(opt => opt.IsEnabled))
{
@@ -700,10 +701,10 @@ private static List GenerateCssFromWsOptions(ConfigurableDictionaryNo
if (!string.IsNullOrEmpty(configNode.Style))
wsRule.Declarations.Properties.AddRange(GenerateCssStyleFromLcmStyleSheet(configNode.Style, wsId, propertyTable));
if (!IsEmptyRule(wsRule))
- return new List {wsRule};
+ rules.Add(wsRule);
}
- return new List();
+ return rules;
}
private static List GenerateCssForWritingSystemPrefix(ConfigurableDictionaryNode configNode, string baseSelection, ReadOnlyPropertyTable propertyTable)