uid] = $user->name . " ($user->mail)";
}
asort($users);
$select = array(
'#type' => 'select',
'#title' => t("Set user ID of entries in this file to"),
'#options' => $users,
'#default_value' => $my_uid,
'#disabled' => (user_access('administer biblio')) ? FALSE : TRUE
);
return $select;
}
/**
* Return a form used to import files into biblio.
*
* @return
* An array which will be used by the form builder to build the import form
*/
function biblio_import_form() {
  global $user;
  $msg = '';
  // Only users with biblio 'import' access get the form; everyone else is
  // shown an error message page below.
  if (biblio_access('import')) { // && !user_access('administer nodes')) {
    // File uploads require a multipart form.
    $form['#attributes']['enctype'] = 'multipart/form-data';
    $form['biblio_import_file'] = array(
      '#type' => 'file',
      '#title' => t('Import file'),
      '#default_value' => '',
      '#size' => 60
    );
    // Importer modules advertise their supported formats through
    // hook_biblio_import_options().
    $import_options = module_invoke_all('biblio_import_options');
    if (count($import_options) > 1) {
      // Multiple importers: present a select, with '0' as the
      // "no type chosen" sentinel value.
      $form['filetype'] = array(
        '#type' => 'select',
        '#title' => t('File Type'),
        '#default_value' => 0,
        '#options' => array(
          '0' => t('Select type'),
        )
      );
      $form['filetype']['#options'] = array_merge($form['filetype']['#options'], $import_options);
      asort($form['filetype']['#options']);
    }
    elseif (count($import_options) == 1) {
      // Exactly one importer: skip the select and submit its key directly
      // as a hidden value.
      $form['biblio_import_file']['#description'] = t('Import type: @option', array('@option' => current($import_options)));
      $form['filetype'] = array(
        '#type' => 'value',
        '#value' => key($import_options),
      );
    }
    $form['batch_process'] = array(
      '#type' => 'checkbox',
      '#title' => t('Batch Process'),
      '#default_value' => 1,
      '#description' => t('You should use batch processing if your import file contains more than about 20 records, or if you are experiencing script timeouts during import'),
    );
    // Owner of the imported entries; defaults to the current user.
    $form ['userid'] = _biblio_admin_build_user_select($user->uid);
    // Get the vocabularies attached to the biblio node type ...
    $vocabularies = module_invoke('taxonomy', 'get_vocabularies', 'biblio');
    // ... and print a form to select the terms in each of them
    $form['import_taxonomy'] = array(
      '#type' => 'fieldset',
      '#collapsible' => TRUE,
      '#collapsed' => TRUE,
      '#title' => t('Taxonomy Settings'),
      '#description' => t('Typically you don\'t have to do anything here, however if you wish, you may select terms to be assigned to imported records. This effectively adds a keyword to all entries being imported.'));
    if (count($vocabularies)) {
      if (variable_get('biblio_keyword_freetagging', 0)) {
        // Free tagging: the keyword vocabulary is handled automatically, so
        // remove it from the per-vocabulary selectors built below.
        $freetag_vocab = $vocabularies[variable_get('biblio_keyword_vocabulary', 0)];
        unset($vocabularies[variable_get('biblio_keyword_vocabulary', 0)]);
        $msg = t('NOTE: Keyword "free tagging" is turned on, consequently all incomming keywords will be added to the @name vocabulary as specified in the "Keyword" section of the !url page.', array('@name' => $freetag_vocab->name, '!url' => l(t('admin/config/biblio'), 'admin/config/biblio')));
      }
      else {
        $msg = t('NOTE: Keyword "free tagging" is turned off, consequently keywords will NOT be added to the vocabulary as specified in the Taxonomy section of the !url page.', array('!url' => l(t('admin/config/biblio'), 'admin/config/biblio')));
      }
      $i = 0;
      // One term selector per remaining vocabulary, weighted like the
      // vocabulary itself.
      foreach ($vocabularies as $vocabulary) {
        $form['import_taxonomy']['vocabulary' . $i] = module_invoke('taxonomy', 'form', $vocabulary->vid, 0);
        $form['import_taxonomy']['vocabulary' . $i]['#weight'] = $vocabulary->weight;
        $form['import_taxonomy']['vocabulary' . $i++]['#description'] = t("Select taxonomy term to be assigned to imported entries");
      }
      $form['import_taxonomy']['copy_to_biblio'] = array(
        '#type' => 'checkbox',
        '#title' => t('Copy these terms to the biblio keyword database'),
        '#return_value' => 1,
        '#default_value' => variable_get('biblio_copy_taxo_terms_to_keywords', 0),
        '#description' => t('If this option is selected, the selected taxonomy terms will be copied to the ' . check_plain(variable_get('biblio_base_title', 'Biblio')) . ' keyword database and be displayed as keywords (as well as taxonomy terms) for this entry.')
      );
    }
    else {
      // No vocabularies (or no taxonomy module): explain why the fieldset
      // is empty instead of showing term selectors.
      if (module_exists('taxonomy')) {
        $vocab_msg = t('There are currently no vocabularies assigned to the biblio node type, please go the the !url page to fix this', array('!url' => l(t('admin/content/taxonomy'), 'admin/content/taxonomy')));
      }
      else{
        $vocab_msg = '
' . t('Depends on') . ': ' . t('Taxonomy') . ' (' . t('disabled') . ')
';
      }
      $form['import_taxonomy']['vocabulary_message'] = array(
        '#value' => '' . $vocab_msg . '
'
      );
    }
    $form['import_taxonomy']['freetagging_information'] = array(
      '#value' => '' . $msg . '
'
    );
    $form['button'] = array('#type' => 'submit', '#value' => t('Import'));
    return $form;
  }
  else {
    drupal_set_message(t("You are not authorized to access the biblio import page"), 'error');
    print theme('page', '');
  }
}
/**
* Implementation of hook_validate() for the biblio_import_form.
*/
/**
 * Implementation of hook_validate() for the biblio_import_form.
 *
 * Translates PHP file-upload error codes into form errors and ensures a
 * file type was actually selected before the import proceeds.
 */
function biblio_import_form_validate($form, & $form_state) {
  $op = $form_state['values']['op'];
  $filetype = $form_state['values']['filetype'];
  // Non-zero $_FILES error codes correspond to the UPLOAD_ERR_* constants.
  if ($error = $_FILES['files']['error']['biblio_import_file']) {
    switch ($error) {
      case UPLOAD_ERR_INI_SIZE:
        form_set_error('biblio_import_form', t("The uploaded file exceeds the upload_max_filesize directive in php.ini."));
        break;
      case UPLOAD_ERR_FORM_SIZE:
        form_set_error('biblio_import_form', t("The uploaded file exceeds the MAX_FILE_SIZE directive that was specified in the HTML form."));
        break;
      case UPLOAD_ERR_PARTIAL:
        form_set_error('biblio_import_form', t("The uploaded file was only partially uploaded."));
        break;
      case UPLOAD_ERR_NO_FILE:
        form_set_error('biblio_import_form', t("No file was uploaded."));
        break;
      case UPLOAD_ERR_NO_TMP_DIR:
        form_set_error('biblio_import_form', t("Missing a temporary folder."));
        break;
      case UPLOAD_ERR_CANT_WRITE:
        form_set_error('biblio_import_form', t("Failed to write file to disk."));
        break;
      case UPLOAD_ERR_EXTENSION:
        form_set_error('biblio_import_form', t("File upload stopped by extension."));
    }
  }
  // Bug fix: the "Select type" option in biblio_import_form() has the value
  // '0', not "none", so the original comparison could never trigger. Accept
  // both so any legacy "none" value is still caught.
  if ($op == t('Import') && ($filetype == '0' || $filetype === 'none')) {
    form_set_error('biblio_import_form', t("Error: You must select a file type"));
  }
}
/**
* Implementation of hook_submit() for the biblio_import_form.
*/
/**
 * Submit handler for biblio_import_form().
 *
 * Saves the uploaded file, collects selected taxonomy terms, and either
 * launches a Batch API import or imports inline, then deletes the upload.
 */
function biblio_import_form_submit($form, & $form_state) {
  global $user;
  $batch_proc = FALSE;
  // Permitted upload extensions covering the supported import formats.
  $extensions = 'xml bib enw mrc ris txt';
  $validators['file_validate_extensions'] = array();
  $validators['file_validate_extensions'][0] = $extensions;
  if ($form_state['values']['op'] == t('Import') && isset ($form_state['values']['filetype'])) {
    if ($import_file = file_save_upload('biblio_import_file', $validators)) {
      if ($form_state['values']['batch_process'] == 1) {
        $batch_proc = TRUE; // we will use batch import for larger files.
      }
      // Concatenate all the terms of the different vocabularies
      // in a single array to be sent to biblio_import
      $terms = array();
      foreach (array_keys($form_state['values']) as $key) {
        // Form keys 'vocabulary0', 'vocabulary1', ... carry the term picks.
        if (preg_match('/(vocabulary[0-9]+)/', $key)) {
          if (!empty($form_state['values'][$key])) {
            if (is_array($form_state['values'][$key])) {
              $terms[] = $form_state['values'][$key];
            }
            else {
              $terms[] = array($form_state['values'][$key]);
            }
          }
        }
        // NOTE(review): 'copy_to_biblio' is only captured when at least one
        // term was collected before it in iteration order — verify this
        // ordering assumption against the form structure.
        if (count($terms) && $key == 'copy_to_biblio') $terms['copy_to_biblio'] = $form_state['values'][$key];
      }
      // Added the $terms argument
      // the array of terms to be attached to the node(s)
      $userid = (isset($form_state['values']['userid'])) ? $form_state['values']['userid'] : $user->uid;
      $filetype = $form_state['values']['filetype'];
      // Human-readable size (KB) appended to the batch title.
      $filesize = sprintf("%01.1f", $import_file->filesize / 1000);
      $filesize = " ($filesize KB)";
      if ($batch_proc) {
        // microtime() serves as a (sufficiently) unique id to tag this
        // import's rows in {biblio_import_cache}.
        $session_id = microtime();
        $batch_op = array(
          'title' => t('Importing @filename', array('@filename' => $import_file->filename . $filesize)),
          'operations' => array(
            array('biblio_import', array($import_file, $filetype, $userid, $terms, $batch_proc, $session_id)),
            array('biblio_import_batch_operations', array($session_id, $user, $userid, $terms))
          ),
          'progressive' => TRUE,
          'finished' => 'biblio_import_batch_finished',
          'init_message' => t('Parsing file...'),
          'progress_message' => t('Saving nodes...'),
          'file' => './' . drupal_get_path('module', 'biblio') . '/includes/biblio.import.export.inc'
        );
        batch_set($batch_op);
        $base = variable_get('biblio_base', 'biblio');
        batch_process("$base/import");
      }
      else{ //not batch processing the file
        $session_id = microtime();
        $context = array();
        biblio_import($import_file, $filetype, $userid, $terms, $batch_proc, $session_id, $context);
        biblio_import_finalize(TRUE, $context['results']);
      }
      // Remove the uploaded file now that its contents have been consumed.
      file_delete($import_file);
    }
    else {
      drupal_set_message(t("File was NOT successfully uploaded"), 'error');
    }
  }
}
/**
 * Batch operation callback: save cached import records as nodes.
 *
 * Reads up to $limit serialized node records per pass from
 * {biblio_import_cache} (tagged with this import's $session_id), saves
 * them, and updates the batch progress and ETA message.
 *
 * @param $session_id
 *   Identifier tagging this import's rows in {biblio_import_cache}.
 * @param $user
 *   The account that launched the import (carried in $context['results']).
 * @param $userid
 *   The uid the imported nodes should ultimately be assigned to.
 * @param $terms
 *   Taxonomy terms selected on the import form.
 * @param $context
 *   Batch API context array (by reference).
 */
function biblio_import_batch_operations($session_id, $user, $userid, $terms, &$context) {
  $limit = 10; // Records saved per invocation of this callback.
  if (empty($context['sandbox'])) {
    // Initiate multistep processing.
    $context['results']['session_id'] = $session_id;
    $context['results']['userid'] = $userid;
    $context['results']['user'] = $user;
    $context['results']['terms'] = $terms;
    $context['sandbox']['progress'] = 0;
    $context['sandbox']['current_id'] = 0;
    $context['results']['nids'] = array();
    $context['sandbox']['max'] = db_query("SELECT COUNT(DISTINCT(id)) FROM {biblio_import_cache} WHERE session_id = :sessid", array(':sessid' => $session_id))->fetchField();
    $context['sandbox']['itters'] = $context['sandbox']['max'] / $limit;
    $context['sandbox']['eta'] = 0;
  }
  // Bail out if the cache is empty.
  if ($context['sandbox']['max'] == 0) {
    return;
  }
  // Process the next $limit nodes, timing the pass for the ETA estimate.
  timer_start('biblio_import');
  $result = db_query_range("SELECT id, data FROM {biblio_import_cache} WHERE id > :id AND session_id = :sessid ORDER BY id ASC", 0, $limit, array(':id' => $context['sandbox']['current_id'], ':sessid' => $session_id));
  foreach ($result as $row) {
    // Rows hold base64-encoded serialized node objects (see biblio_save_node()).
    if ($node = unserialize(base64_decode($row->data))) {
      biblio_save_node($node);
      $context['results']['nids'][] = $node->nid;
    }
    $context['sandbox']['progress']++;
    $context['sandbox']['current_id'] = $row->id;
  }
  $looptime = timer_stop('biblio_import');
  $context['sandbox']['eta'] += $looptime['time'];
  $itters = $context['sandbox']['progress'] / $limit;
  if ($itters) {
    // Estimate the time remaining from the average per-pass duration.
    $average_time = $context['sandbox']['eta'] / $itters;
    $eta = (($context['sandbox']['itters'] * $average_time) - ($average_time * $itters)) / 1000;
    if ($eta >= 60) {
      // Bug fix: the cast bound tighter than the division, so the original
      // computed ((int) $eta) / 60 — a float — instead of whole minutes.
      $min = (int) ($eta / 60);
    }
    else {
      $min = 0;
    }
    $sec = $eta % 60;
    $eta = sprintf("%d:%02d", $min, $sec);
    $progress = sprintf("%d / %d", $context['sandbox']['progress'], $context['sandbox']['max']);
    $context['message'] = t('
Nodes saved: %progress
Time remaining: %eta min.
' , array('%progress' => $progress, '%eta' => $eta));
  }
  // Multistep processing : report progress.
  if ($context['sandbox']['progress'] <= $context['sandbox']['max']) {
    $context['finished'] = $context['sandbox']['progress'] / $context['sandbox']['max'];
  }
}
/**
 * Batch 'finished' callback for the biblio import batch.
 *
 * Reports the import results, then purges the cached rows that were
 * queued under this batch's session id.
 */
function biblio_import_batch_finished($success, $results, $operations) {
  biblio_import_finalize($success, $results);
  // Clean up the import cache rows belonging to this session.
  $purge = db_delete('biblio_import_cache');
  $purge->condition('session_id', $results['session_id']);
  $purge->execute();
}
/**
 * Finalize an import run: report results and fix node ownership.
 *
 * Prints and logs counts of imported and duplicate nodes and, when the
 * importing admin chose a different owner, reassigns the new nodes (and
 * their revisions) to that uid.
 *
 * @param $success
 *   TRUE if the import (or batch) completed without error.
 * @param $results
 *   Array with keys 'format', 'nids', 'dups', 'file', 'user', 'userid'.
 */
function biblio_import_finalize($success, $results) {
  $format = $results['format'];
  $nids = $results['nids'];
  $dups = $results['dups'];
  $total = count($nids) + count($dups);
  //  drupal_set_message(t("%count of %total nodes were successfully imported.", array('%count' => count($nids), '%total' => $total)), (count($nids) != $total)?'warning':'status');
  if ($success && (count($nids) || count($dups))) {
    $message = t("The file @file was successfully uploaded.", array('@file' => $results['file']->filename));
    drupal_set_message($message, 'status');
    watchdog($format, $message);
    $count = count($nids);
    // NOTE(review): format_plural() already substitutes @count/@total here,
    // yet the substituted string is passed to watchdog() with the same
    // placeholders again — verify the double substitution is intended.
    $message = format_plural($count, 'One of @total node imported.', '@count of @total nodes imported.', array('@total' => $total));
    drupal_set_message($message, 'status');
    watchdog($format, $message, array('@count' => $count, '@total' => $total), WATCHDOG_INFO);
    if (count($dups)) {
      $count = count($dups);
      $message = format_plural($count, 'One duplicate node skipped.', '@count duplicate nodes skipped.');
      drupal_set_message($message, 'status');
      watchdog($format, $message, array('@count' => $count), WATCHDOG_INFO);
    }
  }
  else {
    $count = count($nids);
    $message = t('Import finished with an error! ') . format_plural($count, 'One node imported.', '@count nodes imported.');
    drupal_set_message($message, 'error');
    watchdog($format, $message, array('@count' => $count), WATCHDOG_ERROR);
  }
  $user = $results['user'];
  $userid = $results['userid'];
  // If an admin imported on behalf of another user, transfer ownership of
  // the newly created nodes and their revisions to that user.
  if (user_access('administer biblio') && count($nids) && $user->uid != $userid) {
    //    db_query('UPDATE {node} SET uid = :uid WHERE nid IN(:nid)', array(':uid' => $results['userid'], ':nid' => implode(',', $nids)));
    db_update('node')
      ->fields(array('uid' => $results['userid']))
      ->condition('nid', $nids, 'IN')
      ->execute();
    //    db_query('UPDATE {node_revision} SET uid = :uid WHERE nid IN(:nid)', array(':uid' => $results['userid'], ':nid' => implode(',', $nids)));
    db_update('node_revision')
      ->fields(array('uid' => $results['userid']))
      ->condition('nid', $nids, 'IN')
      ->execute();
  }
}
/**
 * Fetch the raw contents of a file from a URL (or any fopen-able path).
 *
 * @param $URL
 *   The URL or stream path to read from.
 *
 * @return
 *   The fetched data as a string; an empty string if the URL could not be
 *   opened (an error message is set in that case).
 */
function biblio_import_from_url($URL) {
  $handle = fopen($URL, "r"); // fetch data from URL in read mode
  $data = "";
  if ($handle) {
    // Read in 4 KB chunks until end of stream.
    while (!feof($handle)) {
      $data .= fread($handle, 4096);
    }
    fclose($handle);
  }
  else {
    // Bug fix: the placeholder array must be keyed ('%url' => $URL); the
    // original passed array('%url', $URL), so %url was never substituted.
    $errorMessage = t("Error occurred: Failed to open %url", array('%url' => $URL)); // network error
    drupal_set_message($errorMessage, 'error');
  }
  return $data;
}
/**
 * Form builder: the biblio export page.
 *
 * Currently offers a single action: exporting a ".pot" file of the
 * database-held titles and hints that translation extractors miss.
 */
function biblio_export_form() {
  $form = array();
  $pot_description = t('Here you may export a ".pot" file which contains the titles and hints from the database which are not normally captured by translation extractors)');
  $form['pot'] = array(
    '#type' => 'fieldset',
    '#collapsible' => TRUE,
    '#collapsed' => TRUE,
    '#title' => t('POT Export'),
    '#description' => $pot_description,
  );
  $form['pot']['button'] = array('#type' => 'submit', '#value' => t('Export translation data'));
  return $form;
}
/**
 * Submit handler for biblio_export_form().
 *
 * Triggers the ".pot" translation-data dump when its button was pressed.
 */
function biblio_export_form_submit($form, & $form_state) {
  $pressed = $form_state['values']['op'];
  if ($pressed == t('Export translation data')) {
    biblio_dump_db_data_for_pot();
  }
}
/**
* Import data from a file and return the node ids created.
*
 * @param $import_file
 *   The uploaded file object containing the data to import
 * @param $type
 *   The format of the file to be imported (tagged, XML, RIS, bibTEX)
 * @param $userid
 *   The user id that will be assigned to each node imported
* @param $terms
* the vocabulary that the imported nodes will be associated with
 * @return
 *   An array of the node ids of the items imported, delivered via the
 *   $context parameter rather than a direct return value
*/
function biblio_import($import_file, $type, $userid = 1, $terms = NULL, $batch = FALSE, $session_id = NULL, &$context ) {
  global $user;
  $parsed = 0;
  $nids = array();
  $dups = array();
  // Update the batch progress message if we are running inside a batch.
  if (isset($context['message'])) $context['message'] = t('Parsing file');
  switch ($type) {
    case 'csv' : // comma separated variable file
      //      $file_content = @ file_get_contents($import_file->uri);
      //      $parsed = biblio_csv_import($file_content, $node_template, $node_array);
      break;
    case 'biblio_backup' : // a complete backup of all biblio information
      $file_content = @ file_get_contents($import_file->uri);
      // NOTE(review): $node_template and $node_array are undefined here, and
      // biblio_restore() is declared as (&$csv_content, $mode = 'create') —
      // this call does not match that signature; confirm intended behavior.
      $parsed = biblio_restore($file_content, $node_template, $node_array);
      break;
    default:
      // Delegate parsing/saving to the importer module for this format
      // (e.g. biblio_bibtex); it returns created and duplicate node ids.
      list($nids, $dups) = module_invoke($type, 'biblio_import', $import_file, $terms, $batch, $session_id);
      break;
  }
  // Results are handed back through $context instead of a return value.
  $context['results']['nids'] = $nids;
  $context['results']['dups'] = $dups;
  $context['results']['format'] = $type;
  $context['results']['userid'] = $userid;
  $context['results']['user'] = $user;
  $context['results']['file'] = $import_file;
  return ;
}
/**
* Export nodes in a given file format.
*
* @param $format
* The file format to export the nodes in (tagged, XML, bibTEX)
* @param $nid
* If not NULL, then export only the given nodeid, else we will
* use the session variable which holds the most recent query. If neither
* $nid or the session variable are set, then nothing is exported
 * @param $popup
 *   Presumably whether the export should be rendered for a popup window
 *   rather than a file download — TODO confirm against callers
* @return
* none
*/
function biblio_export($format = "tagged", $nid = NULL, $popup = FALSE) {
  $params = array(); // NOTE(review): unused in this function.
  $nids = array();
  $arg_list = array();
  // No explicit node given: export the result set of the most recent
  // biblio listing query remembered in the session.
  if ($nid === NULL && isset ($_SESSION['last_biblio_query']) ) {
    module_load_include('inc', 'biblio', 'includes/biblio.pages');
    $uri = drupal_parse_url(request_uri());
    $arg_list += $uri['query'];
    $arg_list['page_limit'] = 0; // 0 disables paging: export everything.
    $query_info = biblio_build_query($arg_list);
    if (isset($query_info['query'])) {
      $result = $query_info['query'];
    }
    else {
      $result = array();
    }
    foreach ($result as $node) {
      $nids[] = $node->nid;
    }
  }
  elseif (!empty ($nid)) {
    // Export just the single requested node.
    $nids[] = $nid;
  }
  elseif (!count($nids)) {
    // Nothing to export.
    return;
  }
  // Hand the node ids to the exporter module for the requested format.
  module_invoke('biblio_' . $format, 'biblio_export', $nids);
}
/**
* Save node imported from a file.
*
* @param $node_array
* a 2 dimensional array containing all the node information
* @return
* The node ids of the saved nodes
*/
/**
 * Save nodes imported from a file.
 *
 * @param $node_array
 *   (by reference) Array of node objects produced by an import parser.
 *
 * @return
 *   An array collecting the return value of biblio_save_node() for each
 *   node; empty if node_save() is unavailable or there were no nodes.
 *   NOTE(review): biblio_save_node() currently returns nothing on its
 *   normal save path, so these entries may all be NULL — verify callers.
 */
function biblio_save_imported_nodes(& $node_array) {
  // Bug fix: $node_ids was previously undefined when the loop never ran,
  // producing a PHP notice and a NULL return instead of an array.
  $node_ids = array();
  if (function_exists('node_save')) {
    foreach ($node_array as $imp_node) {
      $node_ids[] = biblio_save_node($imp_node);
    }
  }
  return $node_ids;
}
/**
 * Save (or cache, or normalize) a single imported biblio node.
 *
 * @param $node
 *   The node object to save.
 * @param $batch
 *   If TRUE together with $session_id, the node is serialized into
 *   {biblio_import_cache} for later saving by the batch callback instead
 *   of being saved immediately.
 * @param $session_id
 *   Identifier tagging the cached row for this import session.
 * @param $save_node
 *   If FALSE, the prepared node is returned as an array instead of being
 *   saved (used to parse data back into the input form).
 *
 * @return
 *   Nothing on the save/cache paths; the node cast to an array when
 *   $save_node is FALSE.
 */
function biblio_save_node($node, $batch = FALSE, $session_id = NULL, $save_node = TRUE) {
  global $user;
  if ($batch && $session_id) { // we are batch processing some import data
    $node = base64_encode(serialize($node)); // base64_encode to avoid problems unserializing strings with embeded quotes.
    db_query("INSERT INTO {biblio_import_cache} (session_id, data) VALUES (:sessid, :node)", array(':sessid' => $session_id, ':node' => $node));
    return;
  }
  // Apply the site-wide publishing defaults for the biblio content type.
  $options = variable_get('node_options_biblio', array('status'));
  if (module_exists('i18n') && variable_get('i18n_node_biblio', 0) && variable_get('language_content_type_biblio', 0) ) {
    $node->language = module_invoke('i18n', 'default_language');
  }
  $node->uid = $user->uid;
  $node->type = 'biblio';
  $node->comment = variable_get('comment_biblio', 0);
  $node->promote = (int) in_array('promote', $options);
  $node->moderate = (int) in_array('moderate', $options);
  $node->sticky = (int) in_array('sticky', $options);
  $node->format = 0;
  $node->status = (int) in_array('status', $options);
  if (!isset($node->biblio_type)) {
    $node->biblio_type = 129; // default to misc if not set.
  }
  if ($save_node) { // $save_node = TRUE, the normal save path
    // Node titles are limited to 255 characters: truncate and log if longer.
    if (strlen($node->title) > 255) {
      $node->title = substr($node->title, 0, 255);
      node_save($node);
      watchdog('biblio - import', 'Title value truncated to 255 characters', array(), WATCHDOG_ALERT, l($node->title, 'node/' . $node->nid));
    }
    else {
      node_save($node);
    }
    return; // (isset($node->nid)) ? $node->nid : 0;
  }
  else { // $save_node = FALSE, primarily used to parse data and return it to the input form
    return (array)$node;
  }
}
/**
 * Export a result set of biblio records as delimiter-separated text.
 *
 * The field and text separators, header style (labels vs. DB column
 * names) and linebreak expansion are all driven by biblio_csv_* settings.
 *
 * @param $result
 *   Iterable database result of biblio records (one object per node).
 * @param $bfields
 *   Ignored; immediately overwritten with the full biblio field map.
 *   Kept in the signature for backwards compatibility with callers.
 *
 * @return
 *   The CSV document as a string. Download headers are also sent.
 */
function biblio_csv_export_2($result, $bfields) {
  $bfields = biblio_get_db_fields('all');
  // Map biblio type id => human-readable name.
  $btypes = array();
  $query_biblio_types = 'SELECT tid, name FROM {biblio_types}';
  $res_biblio_types = db_query($query_biblio_types);
  foreach ($res_biblio_types as $rec) {
    $btypes[$rec->tid] = $rec->name;
  }
  switch (variable_get('biblio_csv_field_sep', 'tab')) {
    case 'tab' :
      $fieldsep = "\t";
      break;
    case 'comma' :
      $fieldsep = ',';
      break;
  }
  switch (variable_get('biblio_csv_text_sep', 'dquote')) {
    case 'dquote' :
      $textsep = '"';
      break;
    case 'quote' :
      $textsep = '\'';
      break;
  }
  $label = (variable_get('biblio_csv_col_head', 'label') == 'label' ? 1 : 0); // or 'col_name'
  $linebreak = variable_get('biblio_linebreak_exp', 1);
  $node_array = array();
  $col_array = array();
  foreach ($result as $rec) {
    $node_id = $rec->nid;
    $node_array[$node_id]['type'] = $btypes[$rec->biblio_type]; // there is no "label" for "type"
    $col_array['type'] = 'Type';
    foreach (array_keys($bfields) as $fieldname) {
      if (!empty ($rec->$fieldname) && !in_array($fieldname, array(
          'biblio_citekey',
          'biblio_coins'
        ))) {
        $col_array[$fieldname] = $bfields[$fieldname]; // mark field as in use
        // Bug fix: escape the text separator by doubling it. strtr() with
        // string arguments maps single characters 1:1 (extra characters in
        // the replacement are ignored), so it cannot double a character;
        // str_replace() does this correctly.
        $text = str_replace($textsep, $textsep . $textsep, $rec->$fieldname);
        if ($linebreak) {
          // Expand ';' separators (e.g. between authors) into real newlines.
          $text = strtr($text, ';', "\n");
        }
        $node_array[$node_id][$fieldname] = trim($text);
      }
    }
  }
  // Header line: column labels or raw DB field names, per configuration.
  if ($label) {
    $csv = $textsep . join("$textsep$fieldsep$textsep", array_values($col_array)) . "$textsep\n";
  }
  else { // original DB field names
    $csv = $textsep . join("$textsep$fieldsep$textsep", array_keys($col_array)) . "$textsep\n";
  }
  // Bug fix: the original reassigned $csv here with a hard-coded tab/quote
  // header, discarding the configurable header built above.
  // Enclosing text in the text separator is necessary so multi-line fields
  // (like author) are handled correctly; separators in values were doubled
  // above.
  foreach ($node_array as $line_array) {
    $csv_line = '';
    foreach (array_keys($col_array) as $col) {
      // Not every record uses every column; emit an empty cell when absent.
      $value = isset($line_array[$col]) ? $line_array[$col] : '';
      $csv_line .= $fieldsep . $textsep . $value . $textsep;
    }
    $csv .= substr($csv_line, 1) . "\n"; // cut off leading fieldsep and append EOL
  }
  drupal_add_http_header('Content-Type', 'text/plain; charset=utf-8');
  drupal_add_http_header('Content-Disposition', 'attachment; filename=biblio_export.csv');
  return $csv;
}
//function _biblio_cck_join($biblio_fields = array()) { // works not with php4
/**
 * Build SQL LEFT JOIN fragments for CCK fields attached to biblio nodes.
 *
 * @param $biblio_fields
 *   (by reference) Field name => label map; the 'nid' column and each CCK
 *   value column are appended to it.
 *
 * @return
 *   A string of LEFT JOIN clauses, one per CCK content table.
 */
function _biblio_cck_join(& $biblio_fields) {
  $cck_join = '';
  $biblio_fields['nid'] = 'Node-ID'; // identify records for update operations
  $query_cck_fields = "SELECT field_name, label from {node_field_instance} where type_name='biblio' and not (widget_type='image')";
  $res_cck_fields = db_query($query_cck_fields);
  // Bug fix: the original iterated over "$$res_cck_fields" (a variable
  // variable), which dereferences a nonexistent variable instead of the
  // query result, so no CCK fields were ever joined.
  foreach ($res_cck_fields as $rec) {
    $cck_table = 'content_' . $rec->field_name;
    $cck_field = $rec->field_name . '_value';
    $biblio_fields[$cck_field] = $rec->label;
    $cck_join .= ' left join {' . $cck_table . '} on b.vid=' . $cck_table . '.vid';
  }
  return $cck_join;
}
/**
 * Stream a CSV backup of all biblio data tables to the browser.
 *
 * Exports the joined biblio/node/node_revision data followed by each of
 * the biblio_* support tables, every section rendered via
 * biblio_csv_export().
 *
 * NOTE(review): db_result() and db_fetch_array() are Drupal 6 database
 * APIs while the rest of this file uses the Drupal 7 layer, and db_query()
 * below is passed a filename string where D7 expects an arguments array —
 * this function appears unported; verify before relying on it.
 */
function biblio_backup() {
  // NOTE(review): $csv_function is computed but never used below — confirm
  // whether the CSV rendering was meant to use it.
  $csv_function = (!function_exists('fputcsv')) ? 'biblio_fputcsv' : 'fputcsv';
  $count_sql = "SELECT COUNT(*)
FROM {biblio} b, {node} n, {node_revision} nr
WHERE b.vid = n.vid and nr.vid = n.vid;";
  $field_type_sql = "SELECT * FROM {biblio_field_type} ";
  $field_type_data_sql = "SELECT * FROM {biblio_field_type_data} ";
  $field_fields_sql = "SELECT * FROM {biblio_fields} ";
  $types_sql = "SELECT * FROM {biblio_types} ";
  $sql = "SELECT b.*,
n.type, n.language, n.title, n.uid, n.status, n.created,
n.changed, n.comment, n.promote, n.moderate, n.sticky,
n.tnid, n.translate,
nr.title, nr.body, nr.teaser, nr.log, nr.timestamp, nr.format
FROM {biblio} b, {node} n, {node_revision} nr
WHERE b.vid = n.vid and nr.vid = n.vid;";
  $biblio_count = db_result(db_query($count_sql));
  if ($biblio_count) {
    // Send the whole multi-section dump as one downloadable attachment.
    drupal_add_http_header('Content-Type', 'text/plain; charset=utf-8');
    drupal_add_http_header('Content-Disposition', 'attachment; filename=Biblio-export.csv');
    $biblio_nodes = db_query($sql);
    while ($node = db_fetch_array($biblio_nodes)) {
      $results[] = $node;
    }
    print biblio_csv_export($results);
    unset($results);
    $result = db_query($field_type_data_sql, 'biblio_field_type_data.csv');
    while ($data = db_fetch_array($result)) {
      $results[] = $data;
    }
    print biblio_csv_export($results);
    unset($results);
    $result = db_query($field_fields_sql, 'biblio_fields.csv');
    while ($data = db_fetch_array($result)) {
      $results[] = $data;
    }
    print biblio_csv_export($results);
    unset($results);
    $result = db_query($types_sql, 'biblio_types.csv');
    while ($data = db_fetch_array($result)) {
      $results[] = $data;
    }
    print biblio_csv_export($results);
    unset($results);
    $result = db_query($field_type_sql, 'biblio_field_type.csv');
    while ($data = db_fetch_array($result)) {
      $results[] = $data;
    }
    print biblio_csv_export($results);
  }
}
/**
 * Restore biblio data from a CSV backup.
 *
 * Stub: not yet implemented. Referenced by biblio_import() for the
 * 'biblio_backup' import type (NOTE(review): that call site passes
 * different arguments than this signature declares — confirm).
 *
 * @param $csv_content
 *   (by reference) The raw CSV backup content.
 * @param $mode
 *   Restore mode; defaults to 'create'. Other values undefined here.
 */
function biblio_restore(& $csv_content, $mode = 'create') {
}
/**
 * Serialize one or more records to CSV text.
 *
 * The keys of the first non-empty record become the header row; every
 * record (including the first) is then emitted as a data row via
 * biblio_strcsv().
 *
 * @param $results
 *   An array of associative record arrays, or a single record which will
 *   be cast to an array.
 *
 * @return
 *   The CSV text including the header line; empty string for no records.
 */
function biblio_csv_export($results) {
  // Normalize a single record into a one-element list.
  $rows = is_array($results) ? $results : array((array) $results);
  $lines = array();
  $fieldnames = NULL;
  foreach ((array) $rows as $row) {
    // Emit the header the first time we see a record with keys.
    if (empty($fieldnames)) {
      $fieldnames = array_keys($row);
      $lines[] = biblio_strcsv($fieldnames);
    }
    $lines[] = biblio_strcsv($row);
  }
  return implode('', $lines);
}
function biblio_strcsv($fields = array(), $delimiter = ',', $enclosure = '"') {
$str = '';
$escape_char = '\\';
foreach ($fields as $value) {
if (strpos($value, $delimiter) !== FALSE || strpos($value, $enclosure) !== FALSE || strpos($value, "\n") !== FALSE || strpos($value, "\r") !== FALSE || strpos($value, "\t") !== FALSE || strpos($value, ' ') !== FALSE) {
$str2 = $enclosure;
$escaped = 0;
$len = strlen($value);
for ($i = 0; $i < $len; $i++) {
if ($value[$i] == $escape_char) {
$escaped = 1;
}
else
if (!$escaped && $value[$i] == $enclosure) {
$str2 .= $enclosure;
}
else {
$escaped = 0;
}
$str2 .= $value[$i];
}
$str2 .= $enclosure;
$str .= $str2 . $delimiter;
}
else {
$str .= $value . $delimiter;
}
}
$str = substr($str, 0, -1);
$str .= "\n";
return $str;
}
/**
 * Export translatable strings from the biblio tables as a ".pot"-style file.
 *
 * Collects type names/descriptions plus field and contributor titles and
 * hints, wraps each unique string in a t() call, and streams the result
 * to the browser as a downloadable file.
 */
function biblio_dump_db_data_for_pot() {
  $strings = array();
  $query = "SELECT name, description FROM {biblio_types} ";
  $result = db_query($query);
  foreach ($result as $type) {
    $strings[] = $type->name;
    if (!empty($type->description)) {
      $strings[] = $type->description;
    }
  }
  $query = "SELECT title, hint FROM {biblio_field_type_data} ";
  $result = db_query($query);
  foreach ($result as $type_data) {
    $strings[] = $type_data->title;
    if (!empty($type_data->hint)) {
      $strings[] = $type_data->hint;
    }
  }
  $query = "SELECT title, hint FROM {biblio_contributor_type_data} ";
  $result = db_query($query);
  foreach ($result as $type_data) {
    $strings[] = $type_data->title;
    // Bug fix: the original evaluated $type_data->hint without assigning it,
    // so contributor hints were never exported.
    if (!empty($type_data->hint)) {
      $strings[] = $type_data->hint;
    }
  }
  $strings = array_unique($strings);
  $output = '';
  foreach ($strings as $string) {
    // Bug fix: the original wrote "\)" which emitted a literal backslash
    // before the closing parenthesis in the generated file.
    $output .= "t(\"$string\");\n";
  }
  drupal_add_http_header('Content-Type', 'text/plain; charset=utf-8');
  drupal_add_http_header('Content-Disposition', 'attachment; filename=biblio_db_values.pot');
  print $output;
}