SEO (search engine optimization) for organic search: SEO is a free method of SEM that uses a variety of techniques to help search engines understand what your website and webpages are about so they can deliver them to web searchers. These techniques include using titles, keywords, and descriptions in a website's and webpages' meta tags, providing relevant content on the topic, using heading tags (e.g. h1 and h2) to structure that content, and linking to and from quality online resources.
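To make these on-page elements concrete, here is a minimal, hypothetical page sketch (the site, URLs, and wording are placeholders, not a real example): the title and description live in the head's meta tags, heading tags outline the topic, and the body links out to a quality resource.

    <!DOCTYPE html>
    <html lang="en">
    <head>
      <!-- Title tag: the headline search engines typically show in results -->
      <title>Beginner's Guide to Vegetable Gardening</title>
      <!-- Meta description: the short summary often shown under the title -->
      <meta name="description" content="Step-by-step tips for planning, planting, and maintaining a backyard vegetable garden.">
      <!-- Meta keywords: part of classic on-page SEO, though major engines give them little weight today -->
      <meta name="keywords" content="vegetable gardening, raised beds, planting guide">
    </head>
    <body>
      <!-- Heading tags signal the page's topic and structure -->
      <h1>Beginner's Guide to Vegetable Gardening</h1>
      <h2>Choosing a Location</h2>
      <p>Relevant content on the topic, linking to a
        <a href="https://www.example.org/soil-testing">quality resource on soil testing</a>.</p>
    </body>
    </html>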

Many page owners think that organic reach (the number of unique individuals who see your post pop up in their news feeds) is enough to make an impact. This was true in the first few years of Facebook but is no longer the case. Facebook, like many other social media networks, is now truly a pay-to-play network. Facebook, Twitter, Instagram, and LinkedIn all use algorithmic feeds, meaning posts are shown to the user based on past behavior and preferences instead of in chronological order. Organic posts from your Facebook page only reach about 2% of your followers, and that number is dropping. Facebook recently announced that, in order to correct a past metrics error, it is changing the way it reports viewable impressions, and organic reach will appear about 20% lower on average when this change takes effect.
To avoid undesirable content in the search indexes, webmasters can instruct spiders not to crawl certain files or directories through the standard robots.txt file in the root directory of the domain. Additionally, a page can be explicitly excluded from a search engine's database by using a meta tag specific to robots. When a search engine visits a site, the robots.txt located in the root directory is the first file crawled. The robots.txt file is then parsed and will instruct the robot as to which pages are not to be crawled. As a search engine crawler may keep a cached copy of this file, it may on occasion crawl pages a webmaster does not wish crawled. Pages typically prevented from being crawled include login-specific pages such as shopping carts and user-specific content such as search results from internal searches. In March 2007, Google warned webmasters that they should prevent indexing of internal search results because those pages are considered search spam.[46]
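As a simple sketch (the paths here are hypothetical), a robots.txt file at the root of the domain can tell compliant crawlers to skip shopping-cart and internal-search pages, while an individual page can opt out of indexing with the robots meta tag mentioned above:

    # https://www.example.com/robots.txt -- the first file a crawler fetches
    User-agent: *
    # Login-specific pages such as the shopping cart
    Disallow: /cart/
    # Results from the site's internal search
    Disallow: /search/

    <!-- Per-page exclusion: placed in the page's <head> -->
    <meta name="robots" content="noindex, follow">

Because crawlers may work from a cached copy of robots.txt, changes to these rules are not always respected immediately.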
Sometimes considered part of SEM, social media sites like Twitter, YouTube, Facebook, and Delicious have their own search fields and also pass authority to sites through links. Making sure your content and links are placed (where appropriate) on these social media sites can increase your visibility in users' search engine queries. SMM is a rapidly growing area of Internet marketing, but discussing it further is beyond the scope of this Guide.
Search engines may penalize sites they discover using black hat methods, either by reducing their rankings or eliminating their listings from their databases altogether. Such penalties can be applied either automatically by the search engines' algorithms, or by a manual site review. One example was the February 2006 Google removal of both BMW Germany and Ricoh Germany for use of deceptive practices.[53] Both companies, however, quickly apologized, fixed the offending pages, and were restored to Google's list.[54]
Remember when you used to rely solely on search engines for traffic? Remember when you worked on SEO and lived and died by your placement in Google? Were you #1? Assured success. Well, okay, maybe not assured. Success only came if the keywords were relevant to your site users, but it was the only real roadmap to generating site traffic and revenue.