bla
parent 5f8e0d57f7
commit 2a653295b4
app.js (4 changed lines)
@@ -76,8 +76,8 @@ App.configure('production', function(){
App.get('/', Routes.index);
App.get('/signin', Routes.signIn);
App.post('/signin', Routes.signInPost);
//App.get('/signup', Routes.signUp);
//App.post('/signup', Routes.signUpPost);
App.get('/signup', Routes.signUp);
App.post('/signup', Routes.signUpPost);
App.get('/signout', Routes.signout)
//App.get('/:name', Routes.view);
App.get('/:name/edit', Routes.edit);
db.js (89 changed lines)
@@ -7,6 +7,7 @@ var Bookshelf = require('bookshelf');
var async = require('async');
var textile = require('textile-js')

var nodegit = require("nodegit")

var Markdown = textile //Markdown = require('js-markdown-extra').Markdown

@@ -14,6 +15,7 @@ function orderize(files){
  var pageObjs = []
  for (var i = 0; i<files.length; i++){
    var file = files[i].name
    console.log(file)
    var order = parseInt(file.split("_")[0])
    if(!isNaN(order)) {
      var divId = file.split("_")[1].split(".markdown")[0]
@@ -53,6 +55,7 @@ function fullPath(files) {
// Load a file, parse the title and generate the HTML
exports.loadPage = function (name, callback) {
  var path = pathFromNameMd(name);
  console.log(path)
  // if (name != "home"){
  //   return callback(null,{exists: false})

@@ -60,19 +63,30 @@ exports.loadPage = function (name, callback) {


  FS.readdir(pathFromDir(), function(err,files){
    var pathfiles=fullPath(files)
    var fileObj = []
    var file_ext=".markdown"
    var clean_list = []
    for( var i = 0; i < files.length; i++){
      var f=files[i]
      if (f.slice( - file_ext.length, f.length ) === file_ext) {
        clean_list.push(f)
      }
    }
    files = clean_list

    var pathfiles=fullPath(files)
    async.map(pathfiles, FS.readFile, function(err, data){
      for( var i = 0; i < files.length; i++){
        //console.log(files[i])
        try{
          var html = Markdown(data[i].toString().replace(/\r/g,""))
          fileObj.push({name : files[i], markdown:data[i].toString(), html:html} )
        }
        catch (err){ }

      }
      try{
        console.log(f)
        var html = Markdown(data[i].toString().replace(/\r/g,""))
        fileObj.push({name : files[i], markdown:data[i].toString(), html:html} )
      }
      catch (err){ }
      console.log(err)
    }
    var torender = orderize(fileObj)

    callback(null,{exists:true, torender:torender})

  })
@@ -127,8 +141,61 @@ exports.editPage = function (name, callback) {
};
// Saving is simple. Just put the markdown in the file
exports.savePage = function (name, value, callback) {
  var path = pathFromNameMd(name);
  FS.writeFile(path, value.replace(/\r/g,""), callback);
  var pathFile = pathFromNameMd(name);
  FS.writeFile(pathFile, value.replace(/\r/g,""), function (){





    var repo;
    var index;
    var oid;

    nodegit.Repository.open(Path.resolve(__dirname, "pages/.git"))
      .then(function(repoResult) {
        repo = repoResult;
      })
      .then(function (){
        return repo.openIndex();
      })
      .then(function(indexResult) {
        index = indexResult;
        return index.read(1);
      })
      .then(function() {
        // this file is in the root of the directory and doesn't need a full path
        return index.addByPath(name);
      })
      .then(function() {
        // this will write both files to the index
        return index.write();
      })
      .then(function() {
        return index.writeTree();
      })
      .then(function(oidResult) {
        oid = oidResult;
        return nodegit.Reference.nameToId(repo, "HEAD");
      })
      .then(function(head) {
        return repo.getCommit(head);
      })
      .then(function(parent) {
        var author = nodegit.Signature.create("BOB",
          "BOBHASHH", 123456789, 60);
        var committer = nodegit.Signature.create("ALICE COMMIT",
          "alicehash", 987654321, 90);

        return repo.createCommit("HEAD", author, committer, "message", oid, [parent]);
      })
      .done(function(commitId) {
        console.log("New Commit: ", commitId);
        callback()
      });

  })

};

var dbFile = Path.join(__dirname, 'app.db');
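The savePage change above follows nodegit's usual add-and-commit recipe: open the repository, stage the file in the index, write a tree, look up the current HEAD as the parent, then create the commit. Below is a minimal standalone sketch of that flow; the commitFile helper name, the signature values, and the use of .then/.catch instead of .done are illustrative only, and it assumes a nodegit release that still exposes repo.openIndex() (newer releases use repo.refreshIndex()).

// Sketch only: stage one file inside repoDir and commit it on top of HEAD.
// The helper name, signature values, and error handling are hypothetical.
var nodegit = require("nodegit");

function commitFile(repoDir, fileName, message, callback) {
  var repo, index, oid;
  nodegit.Repository.open(repoDir)
    .then(function (repoResult) {
      repo = repoResult;
      return repo.openIndex();            // older nodegit API; newer releases use repo.refreshIndex()
    })
    .then(function (indexResult) {
      index = indexResult;
      return index.addByPath(fileName);   // path must be relative to the repository root
    })
    .then(function () {
      return index.write();               // persist the updated index
    })
    .then(function () {
      return index.writeTree();           // build a tree object from the staged index
    })
    .then(function (oidResult) {
      oid = oidResult;
      return nodegit.Reference.nameToId(repo, "HEAD");
    })
    .then(function (head) {
      return repo.getCommit(head);        // the current HEAD commit becomes the parent
    })
    .then(function (parent) {
      var sig = nodegit.Signature.now("Wiki Bot", "wiki@example.com");
      return repo.createCommit("HEAD", sig, sig, message, oid, [parent]);
    })
    .then(function (commitId) {
      callback(null, commitId);
    })
    .catch(callback);
}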
db_git.js (new file, 193 lines)
@@ -0,0 +1,193 @@
var FS = require('fs');
var Path = require('path');
//var Markdown = require("marked");
//var MdRenderer=require("./md-override")
var knex = require("knex");
var Bookshelf = require('bookshelf');
var async = require('async');
var textile = require('textile-js')


var Markdown = textile //Markdown = require('js-markdown-extra').Markdown

function orderize(files){
  var pageObjs = []
  for (var i = 0; i<files.length; i++){
    var file = files[i].name
    var order = parseInt(file.split("_")[0])
    if(!isNaN(order)) {
      var divId = file.split("_")[1].split(".markdown")[0]
      var md = files[i].markdown
      var html = files[i].html
      pageObjs.push ({name: file.split(".markdown")[0], order:order, divId:divId, markdown:md, html:html })
    }
  }
  return pageObjs.sort(function(a,b){ return a.order - b.order });
}


// This function is used to map wiki page names to files
// on the real filesystem.
function pathFromNameMd(name) {
  return Path.join(__dirname, "pages", name + ".markdown");
}

function pathFromName(name) {
  return Path.join(__dirname, "pages", name);
}


function pathFromDir() {
  return Path.join(__dirname, "pages");
}


function fullPath(files) {
  var paths = []
  for (var i= 0; i < files.length; i++)
    paths.push(pathFromName(files[i]))

  return paths
}

// Load a file, parse the title and generate the HTML
exports.loadPage = function (name, callback) {
  var path = pathFromNameMd(name);
  // if (name != "home"){
  //   return callback(null,{exists: false})

  // }


  FS.readdir(pathFromDir(), function(err,files){
    var pathfiles=fullPath(files)
    var fileObj = []
    async.map(pathfiles, FS.readFile, function(err, data){
      for( var i = 0; i < files.length; i++){
        //console.log(files[i])
        try{
          var html = Markdown(data[i].toString().replace(/\r/g,""))
          fileObj.push({name : files[i], markdown:data[i].toString(), html:html} )
        }
        catch (err){ }

      }
      var torender = orderize(fileObj)
      callback(null,{exists:true, torender:torender})

    })

  })

};

exports.editPage = function (name, callback) {
  var path = pathFromNameMd(name);


  FS.readFile(path, 'utf8', function (err, markdown) {

    var exists = true;
    if (err) {
      if (err.code === "ENOENT") {
        // Generate a placeholder body.
        markdown = "# " + name.replace(/_/g, " ") +
          "\n\n" + "This page does not exist yet.";
        exists = false;
      } else {
        // Forward on all other errors.
        return callback(err);
      }
    }

    // Parse and render the markdown.
    /*var tree = Markdown.parse(markdown);
    var title = name;
    for (var i = 1, l = tree.length; i < l; i++) {
      if (tree[i] && tree[i][0] === "header") {
        title = tree[i][2];
        tree.splice(i, 1);
        break;
      }
    }*/

    var html = Markdown(markdown);

    callback(null, {
      name: name,
      title: null,
      exists: exists,
      markdown: unescape(markdown),
      html: html,
    });


  });

};
// Saving is simple. Just put the markdown in the file
exports.savePage = function (name, value, callback) {
  var path = pathFromNameMd(name);
  FS.writeFile(path, value.replace(/\r/g,""), callback);
  nodegit.Repository.open("pages")
    .then(function (){
      return repo.openIndex();
    })
    .then(function(indexResult) {
      index = indexResult;
      return index.read(1);
    })
    .then(function() {
      // this file is in the root of the directory and doesn't need a full path
      return index.addByPath(name);
    })
    .then(function() {
      // this will write both files to the index
      return index.write();
    })
    .then(function() {
      return index.writeTree();
    })
    .then(function(oidResult) {
      oid = oidResult;
      return nodegit.Reference.nameToId(repo, "HEAD");
    })
    .then(function(head) {
      return repo.getCommit(head);
    })
    .then(function(parent) {
      var author = nodegit.Signature.create("BOB",
        "BOBHASHH", 123456789, 60);
      var committer = nodegit.Signature.create("ALICE COMMIT",
        "alicehash", 987654321, 90);

      return repo.createCommit("HEAD", author, committer, "message", oid, [parent]);
    })
    .done(function(commitId) {
      console.log("New Commit: ", commitId);
    });

};

var dbFile = Path.join(__dirname, 'app.db');
var DB = Bookshelf(knex({
  client: 'sqlite3',
  connection: { filename: dbFile }
}));

FS.exists(dbFile, function(exists) {
  if (!exists) {
    console.log("create a new DB")
    DB.knex.schema.createTable('Users', function(table) {
      table.increments("id")
      table.string('username')
      table.string('password')
    }).then( function(){ console.log("DB created") })
  }
})

exports.DB = DB;
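Both copies of orderize rely on a filename convention of the form NN_sectionId.markdown: the numeric NN prefix sets the display order and sectionId becomes the divId of the rendered block, while files without a numeric prefix or the .markdown extension are skipped. A small standalone sketch of that parsing, using made-up filenames, looks like this:

// Sketch of the "NN_sectionId.markdown" convention that orderize() expects.
// The filenames below are made-up examples.
var files = ["01_intro.markdown", "02_program.markdown", "10_contact.markdown", "notes.txt"];

var pageObjs = [];
files.forEach(function (f) {
  if (f.slice(-".markdown".length) !== ".markdown") return;  // ignore non-markdown files
  var order = parseInt(f.split("_")[0], 10);                 // numeric prefix drives the sort order
  if (isNaN(order)) return;                                  // ignore files without a numeric prefix
  var divId = f.split("_")[1].split(".markdown")[0];         // middle part becomes the div id
  pageObjs.push({ name: f.split(".markdown")[0], order: order, divId: divId });
});
pageObjs.sort(function (a, b) { return a.order - b.order; });

console.log(pageObjs);
// [ { name: '01_intro', order: 1, divId: 'intro' }, ... ]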
@@ -1,3 +1,3 @@
h3. [THSF]:/less/is/more
h3. [THSF]:/less/is/more/test/test/test

h4. 14-17/05/2015 Toulouse France
@@ -1,4 +1,4 @@
h3. Conférences !/images/LOGO_THSF_128.png!
h3. ConférenceZ !/images/LOGO_THSF_128.png!

| Célia IZOARD ("Revue Z":http://www.zite.fr/-Le-Journal)| L'informatique peut-elle casser des briques?| jeudi à partir de 16h|
| Bernard STIEGLER ("Philosophe":https://fr.wikipedia.org/wiki/Bernard_Stiegler)|Un changement possible? | jeudi (suite)|
@@ -22,3 +22,4 @@ h3=. Datas
* Photos
"Photomaton du Paulla":http://photomaton.thsf.net/photomaton

h3=. Heloo
@@ -11,6 +11,7 @@ var bcrypt = require('bcrypt-nodejs');
// Load a page from the database and render as html
exports.index = function (req, res, next) {
  DB.loadPage(req.params.name, function (err, page) {
    console.log("___" + page)
    if (err) return next(err);
    if (page.exists==false && !req.isAuthenticated())
      res.redirect('/')
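The routes change above makes the index handler send unauthenticated visitors back to '/' when DB.loadPage reports that the requested page does not exist. The same check could be factored into an Express middleware; the sketch below is illustrative only (the middleware name and the req.page hand-off are not part of the codebase), and it assumes DB is required in the same module and that Passport provides req.isAuthenticated().

// Sketch only: the same guard expressed as reusable Express middleware.
// The middleware name and the req.page hand-off are hypothetical.
function redirectAnonymousOnMissingPage(req, res, next) {
  DB.loadPage(req.params.name, function (err, page) {
    if (err) return next(err);
    if (!page.exists && !req.isAuthenticated()) {
      // Unauthenticated visitors cannot create new pages; send them back to the home page.
      return res.redirect('/');
    }
    req.page = page;  // pass the loaded page on to the next handler
    next();
  });
}

// Hypothetical usage: App.get('/:name', redirectAnonymousOnMissingPage, Routes.view);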