Migrating Wiki Pages Remotely – Part 06
Note, this series starts at blogs.msdn.com/dwinter/archive/2008/06/28/migrating-wiki-pages-remotely-part-01.aspx
Now that that is done, it is time to consider some potential difficulties of this approach. First, when you use the copy.asmx, the destination file will automatically gain a property called _CopySource that points back to the source item—making a dependency. When you browse the new destination page, it will have a link at the top noting that it is a copy of the other Wiki Page and provide a link to it. In my scenario, I didn’t want this because my source server was going to be going away—and that is not something that would automatically have been cleaned up. So, I had to add a step of cleaning _CopySource after the copy operation was complete. I found it was more logical to copy everything first and then fix up the links second with a separate button. The good news is that it works perfectly and will have no side effects. If you clear _CopySource—you are setting it to the same value that a normal Wiki Page would have, so it is as though the copy operation through the copy.asmx never happened and you had just manually created the content. Here is the code to clean up _CopySource:
// Post-migration fix-up pass: for every page in the destination wiki library,
// rewrite source-server links to point at the destination and clear the
// _CopySource property that copy.asmx stamped on each copied page (removing
// the "this page is a copy of ..." banner and the dependency on the source server).
if (txt_SelectedWiki2.Text.Length > 0)
{
// Lists.asmx proxy for the DESTINATION site, authenticating as the current user.
Server2WS.Lists s2L = new WikiMigrator.Server2WS.Lists();
s2L.Url = txt_SiteName2.Text.Trim().TrimEnd("/".ToCharArray()) + "/_vti_bin/lists.asmx";
s2L.Credentials = System.Net.CredentialCache.DefaultCredentials;
try
{
// Build the CAML fragments for GetListItems.
// NOTE(review): ndQueryLI and ndQueryOptionsLI are constructed below but never
// passed to GetListItems (nulls are passed instead) — dead setup, or a bug if
// the FileRef filter was meant to apply.
XmlDocument xmlDocLI = new System.Xml.XmlDocument();
XmlNode ndQueryLI = xmlDocLI.CreateNode(XmlNodeType.Element,"Query","");
XmlNode ndViewFieldsLI = xmlDocLI.CreateNode(XmlNodeType.Element,"ViewFields","");
XmlNode ndQueryOptionsLI = xmlDocLI.CreateNode(XmlNodeType.Element,"QueryOptions","");
ndQueryOptionsLI.InnerXml = "<IncludeMandatoryColumns>FALSE</IncludeMandatoryColumns>" +
"<DateInUtc>TRUE</DateInUtc>";
// Fields we need per item: the server-relative path, the wiki body, and the file name.
ndViewFieldsLI.InnerXml = "<FieldRef Name='FileRef' />" +
"<FieldRef Name='WikiField' />" +
"<FieldRef Name='LinkFilename' />";
ndQueryLI.InnerXml = "<Where><Eq><FieldRef Name='FileRef' />" +
"<Value Type='Text'>[server-relative URL of wiki page]</Value></Eq></Where>";
//
// Need to provide a large number or we will restrict at the default 100 items if null
XmlNode ndListItems = s2L.GetListItems(txt_SelectedWiki2.Text, null, null, ndViewFieldsLI, txtNumberRows.Text, null, null);
// Second child of the response — presumably the rs:data element holding the
// z:row items; TODO confirm against the actual GetListItems response shape.
XmlNode ndListItemDetail = ndListItems.ChildNodes[1];
foreach (XmlNode item in ndListItemDetail.ChildNodes)
{
try
{
// Skip non-element children (e.g. whitespace text nodes) which have no attributes.
if (item.Attributes != null)
{
string itemName = item.Attributes["ows_LinkFilename"].Value;
Trace.WriteLine("Fixing: " + itemName);
if (!string.IsNullOrEmpty(itemName))
{
// NOTE(review): copySource and copyDest are computed here but never used below.
string copySource = txt_SiteName.Text.Trim().TrimEnd("/".ToCharArray()) + "/" + txt_SelectedWiki.Text.Trim().TrimEnd("/".ToCharArray()).TrimStart("/".ToCharArray()) + "/" + itemName;
string copyDest = txt_SiteName2.Text.Trim().TrimEnd("/".ToCharArray()) + "/" + txt_SelectedWiki2.Text.Trim().TrimEnd("/".ToCharArray()).TrimStart("/".ToCharArray()) + "/" + itemName;
string wikiData = item.Attributes["ows_WikiField"].Value;
string actualWikiData = string.Empty;
// Fallback: some items return an empty ows_WikiField; recover the body from
// the ows_MetaInfo property bag instead.
if (string.IsNullOrEmpty(wikiData))
{
Trace.WriteLine("...using ows_MetaInfo instead of ows_WikiField");
string itemData = item.Attributes["ows_MetaInfo"].Value;
// MetaInfo is a flat bag of " Name:TY|value" entries; split on the
// " Name:XX|" markers, then take the chunk that follows " WikiField:SW|".
Regex metaProp = new Regex(@"( \w*:\w{2}\|)");
string[] regexData = metaProp.Split(itemData);
bool prepnextMatch = false;
foreach (string data in regexData)
{
try
{
if (data != string.Empty)
{
if (!prepnextMatch)
{
// Found the WikiField marker — the next non-empty chunk is the body.
if (data == " WikiField:SW|")
{
prepnextMatch = true;
}
}
else if (prepnextMatch && actualWikiData == string.Empty)
{
actualWikiData = data;
break;
}
else
{
// Unreachable given the break above; kept as a guard.
throw new System.Exception("E_FAIL");
}
}
}
catch {} // NOTE(review): swallows the E_FAIL guard silently.
}
}
else
{
actualWikiData = wikiData;
}
// Locals for replacement operations
string sitename = txt_SiteName.Text.TrimEnd("/".ToCharArray());
string destsite = txt_SiteName2.Text.TrimEnd("/".ToCharArray());
string wiki1 = txt_SelectedWiki.Text.TrimStart("/".ToCharArray());
string wiki2 = txt_SelectedWiki2.Text.TrimStart("/".ToCharArray());
Uri sourceSiteUri = new Uri(sitename);
Uri destSiteUri = new Uri(destsite);
string sourceAbsolute = sourceSiteUri.AbsolutePath.TrimEnd("/".ToCharArray());
string destAbsolute = destSiteUri.AbsolutePath.TrimEnd("/".ToCharArray());
// NOTE(review): Regex.Escape is correct for the PATTERN argument, but applying it
// to the REPLACEMENT argument is a bug — .NET replacement strings only treat '$'
// specially, so escaping them writes literal backslashes (e.g. "\.") into the page
// text whenever the destination URL contains a regex metacharacter. The replacement
// should be the raw string with '$' doubled ("$$") instead.
// server1/site/library to server2/newsite/newlibrary
string modifiedData = Regex.Replace(actualWikiData, Regex.Escape(sitename + "/" + wiki1), Regex.Escape(destsite + "/" + wiki2), RegexOptions.IgnoreCase);
// server1 to server2
modifiedData = Regex.Replace(modifiedData, Regex.Escape(sitename), Regex.Escape(destsite), RegexOptions.IgnoreCase);
// /site/library to /newsite/newlibrary (+ Encoded)
modifiedData = Regex.Replace(modifiedData, Regex.Escape(sourceAbsolute + "/" + wiki1), Regex.Escape(destAbsolute + "/" + wiki2), RegexOptions.IgnoreCase);
modifiedData = Regex.Replace(modifiedData, Regex.Escape(sourceAbsolute + "/" + Uri.EscapeDataString(wiki1)), Regex.Escape(destAbsolute + "/" + Uri.EscapeDataString(wiki2)), RegexOptions.IgnoreCase);
// /site to /newsite
// This is very dangerous and is commented because of it...
// since source could be '/' and if it actually is, we would
// replace every / in the doc
//if (sourceSiteUri.AbsolutePath != "/")
//{
// modifiedData = modifiedData.Replace(sourceSiteUri.AbsolutePath, destSiteUri.AbsolutePath);
// modifiedData = Regex.Replace(modifiedData, Regex.Escape(), Regex.Escape(), RegexOptions.IgnoreCase);
//}
// Kill the linefeeds and \\ because they will be literal since we have to use CDATA. This could flatten \\server\share links to \server\share in the text, but the links should still work. You could do something more elegant here to protect against that.
modifiedData = modifiedData.Replace(@"\r\n", "");
modifiedData = modifiedData.Replace(@"\\", @"\");
// Optional user-supplied find/replace pair from the UI.
if (chk_AdvRepl.Checked)
{
modifiedData = modifiedData.Replace(txt_Repl1.Text, txt_Repl2.Text);
}
// Batch update: write the rewritten body back and blank _CopySource, which
// resets the page to the state a normally-authored page would have.
string strBatch = "<Method ID='1' Cmd='Update'>" +
"<Field Name='ID'>" + item.Attributes["ows_ID"].Value + "</Field>" +
"<Field Name='WikiField'><![CDATA[" + modifiedData + "]]></Field>" +
"<Field Name='_CopySource'></Field></Method>";
if (chkDebugFull.Checked)
{
Trace.WriteLine("***** " + itemName + " *****");
Trace.WriteLine(strBatch);
}
XmlDocument xmlDoc = new System.Xml.XmlDocument();
System.Xml.XmlElement elBatch = xmlDoc.CreateElement("Batch");
// OnError=Continue: one bad item does not abort the rest of the batch.
elBatch.SetAttribute("OnError", "Continue");
elBatch.SetAttribute("ListVersion", "1");
elBatch.InnerXml = strBatch;
s2L.UpdateListItems(txt_SelectedWiki2.Text, elBatch);
// Progress feedback: log and keep the status textbox scrolled to the bottom.
Trace.WriteLine("Updated: " + itemName);
txt_Status.Text += "Updated: " + itemName + "\r\n";
txt_Status.Select(txt_Status.Text.Length, 0);
txt_Status.ScrollToCaret();
}
}
}
catch {} // NOTE(review): per-item failures are silently ignored — consider logging.
}
}
catch {} // NOTE(review): service-level failures are silently ignored — consider logging.
Part 07:
blogs.msdn.com/dwinter/archive/2008/06/28/migrating-wiki-pages-remotely-part-07.aspx
Comments
Anonymous
June 27, 2008
Note, this series starts at http://blogs.msdn.com/dwinter/archive/2008/06/28/migrating-wiki-pages-remotely-part-01.aspx
Anonymous
November 23, 2010
Nice job! Though a heads up - I've had cases where elements have spaces in the title; "Program Name" for example, picked up from who knows where. In my case it made it impossible to get the title (occasionally tacking " Program" on at the end).