#!/usr/bin/env php
<?php
/*
Wiki Namespace Updater
2011
*/
define("PAGELIST","./pages.list.txt");
define("DEST_DIR","./pages/");
define("SOURCE_DIR","./pages/");
define("DELETE_SOURCE_FILE",FALSE);
function updater_load_list($file,&$page_count){
    $page_count = 0;
    $page_final_array = array();
    if(!file_exists($file)){
        die("no file exists. cannot continue\n");
    }
    $page_list_file = fopen($file,"r");
    // Get contents of list and create an array of the raw data
    $page_list_data = explode("\n",fread($page_list_file, filesize($file)));
    fclose($page_list_file);
    // Break apart the list so we get each start page and final page
    foreach($page_list_data as $page){
        $page_data = explode(" ",trim($page));
        // Skip blank or malformed lines; we need both a source and a destination
        if(count($page_data) < 2){
            continue;
        }
        $page_count++;
        $page_basename = basename($page_data[0]);
        // Clean up the data being stored so we can work with it
        // Gets rid of the leading "./", and stores it
        $page_final_array[$page_basename] = array(
            str_replace("./","",$page_data[0]),
            str_replace("./","",$page_data[1])
        );
    }
    return $page_final_array;
}
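
// Reads the old page from SOURCE_DIR and returns its contents with any
// wiki link line referencing the old page name rewritten to point at the
// new, colon-namespaced page. Returns false if the source file is missing.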
function updater_read_original_file($pages){
    $page_contents = "";
    $file = SOURCE_DIR.$pages[0].".txt";
    // Build a loose pattern: spaces and underscores in the old page name
    // are treated as wildcards.
    $page_content_match = str_replace(array(" ","_"),".*",basename($pages[0]));
    $new_line = "[[".str_replace("/",":",$pages[1])."]]";
    if(file_exists($file)){
        $old_page_file = fopen($file,"r");
        if($old_page_file){
            while(($line = fgets($old_page_file,4096))){
                // Rewrite any wiki link line that matches the old page name
                // so it points at the new namespaced page instead.
                $page_contents .= preg_replace("/.*\[\[.*$page_content_match.*\]\]/",$new_line,$line);
            }
            fclose($old_page_file);
        }
        return $page_contents;
    } else {
        return false;
    }
}
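
// Writes $contents to DEST_DIR/$file.txt, creating each intermediate
// directory along the way. Returns true on a successful write, false
// otherwise.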
function updater_create_new_file($file,$contents=""){
    $path_parts = explode("/",$file);
    $last_path_part = end($path_parts);
    // Start the directory we want to put all content into.
    $full_path = DEST_DIR;
    foreach($path_parts as $path){
        // Create each intermediate directory, but skip the final
        // component, which is the page name itself.
        if($path !== $last_path_part){
            $full_path .= $path."/";
            if(!is_dir($full_path)){
                mkdir($full_path);
            }
        }
    }
    // Now we write data to file.
    $file_handle = fopen(DEST_DIR.$file.".txt","w+");
    if($file_handle === FALSE){
        return false;
    }
    $result = fwrite($file_handle, $contents);
    fclose($file_handle);
    return ($result !== FALSE);
}
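
// Deletes the source copy of a migrated page, but only when
// DELETE_SOURCE_FILE is enabled; otherwise it is a no-op that reports
// success so the caller's status output stays accurate.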
function updater_remove_old_file($paths){
    if(DELETE_SOURCE_FILE == TRUE){
        $file = SOURCE_DIR.$paths[0].".txt";
        if(file_exists($file)){
            return unlink($file);
        }
        return false;
    }
    // Deletion is disabled, so there is nothing to do.
    return true;
}
// This gem handles the little logic we need to see if everything really worked.
// I don't claim it is the most readable content in the world.
function updater_handle_page($page_list){
    foreach($page_list as $pagebasename => $paths){
        print "Processing $pagebasename...\t\t";
        if($page_contents = updater_read_original_file($paths)){
            print "Reading file...\t";
            if(updater_create_new_file($paths[1],$page_contents)){
                if(updater_remove_old_file($paths)){
                    print "Success.......\n";
                } else {
                    print "Unable to complete process.\n";
                }
            } else {
                print "Unable to update... Skipping\n";
            }
        } else {
            print "Cannot read file... Skipping\n";
        }
    }
}
function main(){
    $page_list_data = updater_load_list(PAGELIST,$page_count);
    print "Wiki Update Starting...\n";
    print "Preparing to process $page_count pages.\n\n";
    updater_handle_page($page_list_data);
}
// Run!
main();
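
// Example invocation (the script filename and list contents here are
// hypothetical); the status lines come straight from the print calls above:
//
//   $ php wiki_namespace_updater.php
//   Wiki Update Starting...
//   Preparing to process 2 pages.
//
//   Processing Some_Page...        Reading file...    Success.......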