3

I am trying to create a function that will parse a string and replace any URLs found with an HTML version of that URL.

For example, test.com would become <a href="http://www.test.com">http://www.test.com</a>

Here is the code that I am working with:

--- Find the first URL in x and wrap every occurrence of it in an HTML anchor.
-- @param x string to scan
-- @return the text with the URL linked, plus the substitution count;
--         returns x unchanged when no URL is recognized (instead of nil).
function parse_url (x)
    -- Whitelist of known top-level domains, packed into one dot-separated string.
    local domains = [[.ac.ad.ae.aero.af.ag.ai.al.am.an.ao.aq.ar.arpa.as.asia.at.au
       .aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.biz.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca
       .cat.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.com.coop.cr.cs.cu.cv.cx.cy.cz.dd.de
       .dj.dk.dm.do.dz.ec.edu.ee.eg.eh.er.es.et.eu.fi.firm.fj.fk.fm.fo.fr.fx.ga
       .gb.gd.ge.gf.gh.gi.gl.gm.gn.gov.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu
       .id.ie.il.im.in.info.int.io.iq.ir.is.it.je.jm.jo.jobs.jp.ke.kg.kh.ki.km.kn
       .kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mil.mk
       .ml.mm.mn.mo.mobi.mp.mq.mr.ms.mt.mu.museum.mv.mw.mx.my.mz.na.name.nato.nc
       .ne.net.nf.ng.ni.nl.no.nom.np.nr.nt.nu.nz.om.org.pa.pe.pf.pg.ph.pk.pl.pm
       .pn.post.pr.pro.ps.pt.pw.py.qa.re.ro.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk
       .sl.sm.sn.so.sr.ss.st.store.su.sv.sy.sz.tc.td.tel.tf.tg.th.tj.tk.tl.tm.tn
       .to.tp.tr.travel.tt.tv.tw.tz.ua.ug.uk.um.us.uy.va.vc.ve.vg.vi.vn.vu.web.wf
       .ws.xxx.ye.yt.yu.za.zm.zr.zw]]
    -- tlds[name] = true for every whitelisted TLD.
    local tlds = {}
    for tld in domains:gmatch'%w+' do
       tlds[tld] = true
    end
    -- Accepted schemes; the value 0 is compared against (1-#slash)*#path below.
    local protocols = {[''] = 0, ['http://'] = 0, ['https://'] = 0, ['ftp://'] = 0}

    for pos, url, prot, subd, tld, colon, port, slash, path in x:gmatch
       '()(([%w_.~!*:@&+$/?%%#-]-)(%w[-.%w]*%.)(%w+)(:?)(%d*)(/?)([%w_.~!*:@&+$/?%%#=-]*))'
    do
       -- Accept when: the scheme is known (or absent) and a path only follows
       -- a slash; the port (if any) fits in 16 bits; the TLD is whitelisted or
       -- the whole host is a dotted quad with octets < 256; and the subdomain
       -- has no two adjacent non-word characters (e.g. '..').
       if protocols[prot:lower()] == (1 - #slash) * #path
          and (colon == '' or port ~= '' and port + 0 < 65536)
          and (tlds[tld:lower()] or tld:find'^%d+$' and subd:find'^%d+%.%d+%.%d+%.$'
          and math.max(tld, subd:match'^(%d+)%.(%d+)%.(%d+)%.$') < 256)
          and not subd:find'%W%W'
       then
          -- BUG FIX: the URL must be escaped before being used as a gsub
          -- pattern — characters like '.', '-', '%', '?', '+' are pattern
          -- magic and would mis-match or error. '%' must also be escaped
          -- in the replacement string.
          local pattern = url:gsub('%p', '%%%0')
          local replacement = ('<a href="' .. url .. '">' .. url .. '</a>'):gsub('%%', '%%%%')
          return x:gsub(pattern, replacement)
       end
    end
    -- BUG FIX: no URL recognized — return the input unchanged instead of nil.
    return x
end

There are a few issues that I have run into that I am hoping to resolve:

1) If the string x does not contain any URLs, it returns a nil result. I would like it to instead leave the string unchanged

2) It does not recognize an intranet link (http://test)

3) It does not recognize more than one level of subdomain (http://mysite.whatever.com works, http://mysite.whatever.co.uk does not)

4) It will recognize repeated instances of the same URL, but does not find subsequent URLs. For example, with the string http://www.test.com http://www.test.com http://www.whatever.com, http://www.test.com would be modified twice, but http://www.whatever.com would not be recognized at all.

What can I change to get this working properly?

Yu Hao
  • 119,891
  • 44
  • 235
  • 294

2 Answers

1

This is the use case I originally wrote my sane_uri lpeg pattern for: https://github.com/daurnimator/lpeg_patterns#uri

example: (fill in your own html_escape function)

-- Requires the third-party 'lpeg' and 'lpeg_patterns' modules, plus a
-- user-supplied html_escape function.
local lpeg = require "lpeg"
local alpha = lpeg.R("az", "AZ")
local sane_uri = require "lpeg_patterns.uri".sane_uri
-- Global-substitution pattern: each recognized URI is replaced by an anchor
-- (when its scheme passes the whitelist); other text is copied through one
-- word at a time so matching only starts at word boundaries.
local patt = lpeg.Cs((lpeg.Cg(lpeg.C(sane_uri))/function(u, t)
    if t.scheme == "http" or t.scheme == "https" then -- your scheme whitelist
        return "<a href=\""..html_escape(u).."\">"..html_escape(u).."</a>"
    end
end+(alpha^0*(1-alpha)))^0);
-- BUG FIX: the pattern was assigned to 'patt', not 's' — 's' was undefined.
print(patt:match("some http://example.com/ text"))

The above example uses the "Global substitution" method described in the lpeg manual, along with code similar to the "look for a pattern only at word boundaries" sample.

daurnimator
  • 4,091
  • 18
  • 34
0

Pure Lua solution (if you don't have lpeg on your target system)

-- all characters allowed to be inside URL according to RFC 3986 but without
-- comma, semicolon, apostrophe, equal, brackets and parentheses
-- (as they are used frequently as URL separators)
--
-- Sample input covering the asker's four problem cases: plain words, a bare
-- domain, an intranet host with a scheme, a two-level public suffix (co.uk),
-- a port + path, an IP:port, a .local host, and a query string.
local Some_text_with_URLs = [[
   test
   test.com
   http://test.com
   http://test
   http://mysite.whatever.co.uk
   http://www.lua.org:80/manual/5.2/contents.html
   L.ua 5.2
   url=127.0.0.1:8080
   http://retracker.local/announce
   https://www.google.com/search?q=who+are+Lua+people&tbm=isch
   auth link: ftp://user:pwd@site.com/path - not recognized yet :(
]]

--- Replace every URL found in the given text with an HTML anchor.
-- Works in two scanning passes: pass 1 finds candidates with a dotted host
-- validated against a TLD whitelist (or a dotted-quad IP); pass 2 finds
-- intranet-style hosts that carry an explicit scheme (e.g. http://test).
-- Accepted matches are recorded by start position, so every distinct URL
-- occurrence is linked exactly once and the rest of the text is untouched.
-- @param text_with_URLs string to scan
-- @return string with each recognized URL wrapped in <a href="...">...</a>
function ParseURL(text_with_URLs)

    -- Whitelist of known top-level domains, packed into one dot-separated string.
    local domains = [[.ac.ad.ae.aero.af.ag.ai.al.am.an.ao.aq.ar.arpa.as.asia.at.au
       .aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.biz.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca
       .cat.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.com.coop.cr.cs.cu.cv.cx.cy.cz.dd.de
       .dj.dk.dm.do.dz.ec.edu.ee.eg.eh.er.es.et.eu.fi.firm.fj.fk.fm.fo.fr.fx.ga
       .gb.gd.ge.gf.gh.gi.gl.gm.gn.gov.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu
       .id.ie.il.im.in.info.int.io.iq.ir.is.it.je.jm.jo.jobs.jp.ke.kg.kh.ki.km.kn
       .kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mil.mk
       .ml.mm.mn.mo.mobi.mp.mq.mr.ms.mt.mu.museum.mv.mw.mx.my.mz.na.name.nato.nc
       .ne.net.nf.ng.ni.nl.no.nom.np.nr.nt.nu.nz.om.org.pa.pe.pf.pg.ph.pk.pl.pm
       .pn.post.pr.pro.ps.pt.pw.py.qa.re.ro.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk
       .sl.sm.sn.so.sr.ss.st.store.su.sv.sy.sz.tc.td.tel.tf.tg.th.tj.tk.tl.tm.tn
       .to.tp.tr.travel.tt.tv.tw.tz.ua.ug.uk.um.us.uy.va.vc.ve.vg.vi.vn.vu.web.wf
       .ws.xxx.ye.yt.yu.za.zm.zr.zw]]
    -- tlds[name] = true for every whitelisted TLD.
    local tlds = {}
    for tld in domains:gmatch'%w+' do
       tlds[tld] = true
    end
    -- Numeric max of four operands; used to validate IPv4 octets (< 256).
    local function max4(a,b,c,d) return math.max(a+0, b+0, c+0, d+0) end
    -- Accepted schemes; the value 0 is compared against (1-#slash)*#path below.
    local protocols = {[''] = 0, ['http://'] = 0, ['https://'] = 0, ['ftp://'] = 0}
    -- finished[start_pos] = one-past-end position of an accepted URL;
    -- positions lists every accepted start_pos for later sorting.
    local finished, positions = {}, {}

    -- Pass 1: candidates of the form [scheme]sub.domain.tld[:port][/path].
    -- The leading/trailing '()' captures record the match's start/end positions.
    for pos_start, url, prot, subd, tld, colon, port, slash, path, pos_finish in
       text_with_URLs:gmatch'()(([%w_.~!*:@&+$/?%%#-]-)(%w[-.%w]*%.)(%w+)(:?)(%d*)(/?)([%w_.~!*:@&+$/?%%#=-]*))()'
    do
       -- Accept when: the scheme is known (or absent) and a path only follows
       -- a slash; the subdomain has no two adjacent non-word characters
       -- (e.g. '..'); the port (if any) fits in 16 bits; and the TLD is
       -- whitelisted or the whole host is a dotted quad with octets < 256.
       if protocols[prot:lower()] == (1 - #slash) * #path and not subd:find'%W%W'
          and (colon == '' or port ~= '' and port + 0 < 65536)
          and (tlds[tld:lower()] or tld:find'^%d+$' and subd:find'^%d+%.%d+%.%d+%.$'
          and max4(tld, subd:match'^(%d+)%.(%d+)%.(%d+)%.$') < 256)
       then
          finished[pos_start] = pos_finish
          table.insert(positions, pos_start)
       end
    end

    -- Pass 2: intranet-style hosts with an explicit scheme but no validated
    -- dotted domain (e.g. http://test), which pass 1 skips. %f[%w] is a
    -- frontier pattern so the scheme must start at a word boundary.
    for pos_start, url, prot, dom, colon, port, slash, path, pos_finish in
       text_with_URLs:gmatch'()((%f[%w]%a+://)(%w[-.%w]*)(:?)(%d*)(/?)([%w_.~!*:@&+$/?%%#=-]*))()'
    do
       -- Skip positions already accepted by pass 1.
       if not finished[pos_start] and not (dom..'.'):find'%W%W'
          and protocols[prot:lower()] == (1 - #slash) * #path
          and (colon == '' or port ~= '' and port + 0 < 65536)
       then
          finished[pos_start] = pos_finish
          table.insert(positions, pos_start)
       end
    end

    -- Rebuild the text: plain fragments are copied, accepted URLs are wrapped.
    local new_text = {}

    local function append_text(text)
       table.insert(new_text, text)
    end

    -- Wrap a URL in an anchor; a scheme-less URL gets 'http://' in the href
    -- so the link is navigable, while the visible text stays as written.
    local function append_URL(URL)
       table.insert(new_text, '<a href="'..(URL:find'://' and URL or 'http://'..URL)..'">'..URL..'</a>')
    end

    -- Emit fragments in document order: text before each URL, then the URL.
    table.sort(positions)
    local last_pos = 1
    for _, pos_start in ipairs(positions) do
       append_text(text_with_URLs:sub(last_pos, pos_start - 1))
       last_pos = finished[pos_start]
       append_URL(text_with_URLs:sub(pos_start, last_pos - 1))
    end
    append_text(text_with_URLs:sub(last_pos))

    return table.concat(new_text)
end

-- Run the sample text through the parser and show the linked result.
local linked_text = ParseURL(Some_text_with_URLs)
print(linked_text)
Egor Skriptunoff
  • 23,359
  • 2
  • 34
  • 64